summaryrefslogtreecommitdiffstats
path: root/chromium/third_party/webrtc/modules
diff options
context:
space:
mode:
authorJocelyn Turcotte <jocelyn.turcotte@digia.com>2014-08-08 14:30:41 +0200
committerJocelyn Turcotte <jocelyn.turcotte@digia.com>2014-08-12 13:49:54 +0200
commitab0a50979b9eb4dfa3320eff7e187e41efedf7a9 (patch)
tree498dfb8a97ff3361a9f7486863a52bb4e26bb898 /chromium/third_party/webrtc/modules
parent4ce69f7403811819800e7c5ae1318b2647e778d1 (diff)
Update Chromium to beta version 37.0.2062.68
Change-Id: I188e3b5aff1bec75566014291b654eb19f5bc8ca Reviewed-by: Andras Becsi <andras.becsi@digia.com>
Diffstat (limited to 'chromium/third_party/webrtc/modules')
-rw-r--r--chromium/third_party/webrtc/modules/OWNERS6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/cng/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g711/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/g722/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c42
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h15
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c30
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_mips.c102
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_mips.c365
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c43
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi24
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c327
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c133
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h3
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c33
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c29
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h117
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus.gypi22
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc249
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.c174
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc119
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/tools/OWNERS6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc124
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h90
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.gypi71
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.isolate40
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/OWNERS1
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_g722.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc7
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h94
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.cc269
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.h87
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.cc41
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.h9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc142
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.h40
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc54
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc20
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi65
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc206
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h117
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest.cc514
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/call_statistics.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/interface/audio_coding_module.h101
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/Android.mk67
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.cc430
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.h87
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.cc436
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.h90
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.cc339
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.h79
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.cc150
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.h73
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.cc956
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.h336
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc42
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h42
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc171
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.cc358
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.h84
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.cc500
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.h86
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.cc510
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.h94
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.cc366
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.h76
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.cc349
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.h72
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc1263
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.h1224
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc267
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.h71
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.cc259
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.h71
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.cc903
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.h138
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac_macros.h77
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.cc1151
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.h399
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc153
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.cc319
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.h78
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc251
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.h67
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.cc134
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.h65
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.cc136
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.h65
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.cc108
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.h62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.cc63
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.h40
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.cc471
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.h86
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi153
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc3048
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h455
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/Android.mk73
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/OWNERS8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.c493
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.cc)9
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.h)21
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.cc71
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.h59
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier_unittest.cc75
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder.cc)14
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc)27
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h)15
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittest.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittests.isolate (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittests.isolate)0
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.cc)5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.h)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector_unittest.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector_unittest.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/automode.c783
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/automode.h274
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.cc)6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.h)14
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise_unittest.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/bgn_update.c247
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter_unittest.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_stats.h95
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/bufstats_decision.c427
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/cng_internal.c155
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.c782
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.h128
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db_defines.h97
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.cc)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.h)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise_unittest.cc)6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/correlator.c132
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.cc)20
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.h)14
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.cc)6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.h)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.cc)16
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.h)51
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_unittest.cc)14
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.cc)4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.h)12
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database_unittest.cc)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/defines.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/defines.h)6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_logging.h34
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.cc)4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.h)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager_unittest.cc)4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector_unittest.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.c532
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.h807
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.h)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper_unittest.cc)4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c120
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h220
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.c232
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.h199
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer_unittest.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator_unittest.cc)4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c367
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h73
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand.c1220
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.cc)261
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.h)80
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/expand_unittest.cc)21
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/interface/audio_decoder.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h)19
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/interface/neteq.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/neteq.h)53
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h230
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h454
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h336
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mcu.h300
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_address_init.c33
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c45
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h69
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_reset.c131
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge.c570
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.cc)30
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.h)28
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/merge_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/merge_unittest.cc)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/min_distortion.c55
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c41
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_vector.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_vector.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_buffer_level_filter.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_manager.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_peak_detector.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_buffer.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_external_decoder_pcm16b.h)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_packet_buffer.h)12
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_payload_splitter.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_payload_splitter.h)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/mute_signal.c33
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc62
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi328
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_defines.h374
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_error_codes.h81
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_external_decoder_unittest.cc)17
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.cc)215
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.h)236
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc498
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_statistics.h56
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_stereo_unittest.cc)13
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_tests.gypi)78
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_unittest.cc)430
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate44
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal.c279
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.cc)12
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.h)12
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/normal_unittest.cc)12
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/packet.h)6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.c851
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.cc)60
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h371
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer_unittest.cc)63
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.cc)64
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.h)19
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter_unittest.cc)89
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/peak_detection.c232
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.h)14
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad_unittest.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.c527
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc)11
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.h)31
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.c54
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.cc)4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector_unittest.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/recin.c531
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/recout.c1502
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.c134
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.cc)5
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.h140
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.c240
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.h78
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/set_fs.c78
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/signal_mcu.c820
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/split_and_insert.c152
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.cc)6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.h)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.h)13
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer_unittest.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.cc)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.h)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch_unittest.cc52
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.cc)6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.h)10
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler_unittest.cc)6
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_checksum.h60
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.h)13
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_sink.h46
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc132
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h32
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc115
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h100
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/neteq_rtpplay.cc)223
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/output_audio_file.h50
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.cc155
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.h117
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_source.h37
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_unittest.cc202
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_analyze.cc147
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc147
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h67
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.cc (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.cc)2
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.h (renamed from chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h)8
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/unmute_signal.c41
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq.c1769
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc778
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/OWNERS4
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.h116
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.cc52
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.gypi220
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc229
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.h144
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.h58
-rw-r--r--chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc31
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc726
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h50
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h34
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/Android.mk3
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/OWNERS7
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/audio_device_template.h16
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/opensles_input.cc10
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/opensles_input.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/opensles_output.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/opensles_output.h4
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.cc16
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device.gypi71
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_buffer.cc7
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_generic.h6
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc58
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/audio_device_tests.isolate12
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.cc4
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.cc586
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.h202
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc43
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.h41
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/include/audio_device.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/include/audio_device_defines.h41
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/include/fake_audio_device.h8
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.cc70
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc69
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h15
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc58
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc1
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h13
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/main/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.cc38
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc54
-rw-r--r--chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.h2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/OWNERS6
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/Android.mk9
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.c102
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h31
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h19
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.c774
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.c304
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c15
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h21
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c1213
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.c58
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/include/echo_cancellation.h40
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc21
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c3
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c2
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c29
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h44
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.c16
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.h1
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/agc/digital_agc.c11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_buffer.cc496
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_buffer.h117
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi50
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc646
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h132
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc74
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/audio_processing_tests.gypi28
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/common.h76
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/debug.proto19
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.cc70
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.h25
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc31
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h35
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc27
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc43
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp45
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp45
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.cc20
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.h10
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h164
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h54
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.cc132
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.h12
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/lib_core_neon_offsets.gypi51
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc35
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.h12
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/include/noise_suppression.h8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/noise_suppression.c4
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.c78
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.h8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c279
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h37
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c273
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c1008
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/processing_component.cc27
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/processing_component.h8
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/rms_level.cc61
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/rms_level.h57
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/splitting_filter.cc33
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/splitting_filter.h63
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/typing_detection.cc90
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/typing_detection.h93
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c106
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.h74
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc143
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c67
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h87
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/utility/ring_buffer_unittest.cc11
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc27
-rw-r--r--chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.h10
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc287
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h87
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc211
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h8
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc304
-rw-r--r--chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h59
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc14
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc54
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi14
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.cc7
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.h33
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_types.h11
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h7
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_geometry.h4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/differ.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.h12
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.mm64
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.cc91
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h66
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.cc54
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.h16
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.cc10
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.h10
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h3
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm91
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc5
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc15
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc75
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc8
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h19
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.cc1
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm451
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc27
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_win.cc360
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc16
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/cursor.cc2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/cursor_unittest.cc6
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/desktop.h4
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/scoped_gdi_object.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/scoped_thread_desktop.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.cc92
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.h35
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc324
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h99
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc461
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h159
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.cc46
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h25
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h12
-rw-r--r--[-rwxr-xr-x]chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm (renamed from chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.cc)61
-rwxr-xr-xchromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc6
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc44
-rwxr-xr-xchromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc90
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/x11/x_error_trap.h2
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.cc12
-rw-r--r--chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h3
-rw-r--r--chromium/third_party/webrtc/modules/interface/module_common_types.h71
-rw-r--r--chromium/third_party/webrtc/modules/media_file/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/media_file/source/avi_file.cc47
-rw-r--r--chromium/third_party/webrtc/modules/media_file/source/avi_file.h5
-rw-r--r--chromium/third_party/webrtc/modules/media_file/source/media_file_utility.cc1
-rw-r--r--chromium/third_party/webrtc/modules/modules.gyp120
-rw-r--r--chromium/third_party/webrtc/modules/modules_tests.isolate22
-rw-r--r--chromium/third_party/webrtc/modules/modules_unittests.isolate142
-rw-r--r--chromium/third_party/webrtc/modules/pacing/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/pacing/include/paced_sender.h22
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender.cc39
-rw-r--r--chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc46
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/OWNERS6
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc162
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h32
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h43
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc128
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.h21
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.h2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi60
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc85
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc5
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc36
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h5
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc251
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc58
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h2
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc119
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp_unittest.cc163
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc74
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h36
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc116
-rw-r--r--chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc80
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/fec_receiver.h2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/receive_statistics.h39
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h50
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h8
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h42
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h12
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc17
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_test_helper.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc69
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc10
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc362
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h61
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc173
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc85
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc112
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc695
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.h726
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc592
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc98
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc1
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc107
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h7
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc38
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc39
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h2
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h10
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc92
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h13
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc51
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc46
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc9
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc37
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc32
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi4
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc528
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h38
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc438
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc368
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h58
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc89
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h13
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc165
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc28
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h3
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc163
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility_unittest.cc18
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc5
-rw-r--r--chromium/third_party/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h2
-rw-r--r--chromium/third_party/webrtc/modules/utility/interface/mock/mock_process_thread.h29
-rw-r--r--chromium/third_party/webrtc/modules/utility/interface/process_thread.h2
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/audio_frame_operations.cc1
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/audio_frame_operations_unittest.cc4
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_player_impl.cc117
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_player_unittests.cc106
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc165
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h32
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/frame_scaler.cc1
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc56
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h8
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/rtp_dump_impl.cc19
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/video_coder.cc5
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/video_coder.h2
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/video_frames_queue.cc80
-rw-r--r--chromium/third_party/webrtc/modules/utility/source/video_frames_queue.h8
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/OWNERS8
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/android/device_info_android.cc77
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/android/device_info_android.h11
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/android/video_capture_android.cc94
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/device_info_impl.cc29
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ensure_initialized.cc63
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ensure_initialized.h19
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/include/mock/mock_video_capture.h50
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/include/video_capture.h19
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios.mm4
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios_objc.mm4
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h (renamed from chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.h)17
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.mm (renamed from chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.mm)138
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.mm13
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm7
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture.gypi28
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc67
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h15
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/video_capture_tests.isolate12
-rw-r--r--chromium/third_party/webrtc/modules/video_capture/windows/sink_filter_ds.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/i420/main/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc17
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/test.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/tools/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/tools/video_codecs_tools.gypi1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8.gyp1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_factory.cc24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/interface/video_coding.h6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/Android.mk3
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.cc66
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.h3
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.cc8
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/frame_buffer.cc24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.cc35
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.h9
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.cc111
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.h23
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.cc121
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.h10
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc34
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/jitter_estimator.cc47
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.cc270
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.h136
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc32
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/packet.cc4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/packet.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/receiver.cc54
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/receiver.h5
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/receiver_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.cc24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/session_info.cc6
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/session_info.h1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc248
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h63
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/timing.cc56
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/timing.h8
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_coding.gypi2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.cc53
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.h24
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_test.gypi1
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver.cc156
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc2
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_sender.cc166
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/main/source/video_sender_unittest.cc7
-rw-r--r--chromium/third_party/webrtc/modules/video_coding/utility/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/interface/video_processing.h8
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/OWNERS5
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/brighten.cc7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/brightness_detection.cc7
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/color_enhancement.cc60
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/deflickering.cc12
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/denoising.cc3
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.cc14
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.cc31
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.h4
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.cc19
-rw-r--r--chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.h3
-rw-r--r--chromium/third_party/webrtc/modules/video_render/OWNERS7
-rw-r--r--chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc3
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm4
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm4
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm6
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h2
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm13
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h10
-rw-r--r--chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm52
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render.gypi25
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_frames.cc124
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_frames.h9
-rw-r--r--chromium/third_party/webrtc/modules/video_render/video_render_tests.isolate12
720 files changed, 25353 insertions, 46891 deletions
diff --git a/chromium/third_party/webrtc/modules/OWNERS b/chromium/third_party/webrtc/modules/OWNERS
new file mode 100644
index 00000000000..bbffda7e492
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/OWNERS
@@ -0,0 +1,6 @@
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/cng/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g711/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/g722/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/ilbc/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c
index 5c23f7ab71e..c66be2e484a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c
@@ -67,9 +67,9 @@ int WebRtcIsacfix_EncHistMulti(Bitstr_enc *streamData,
W_upper_LSB = W_upper & 0x0000FFFF;
W_upper_MSB = WEBRTC_SPL_RSHIFT_W32(W_upper, 16);
W_lower = WEBRTC_SPL_UMUL(W_upper_MSB, cdfLo);
- W_lower += WEBRTC_SPL_UMUL_RSFT16(W_upper_LSB, cdfLo);
+ W_lower += ((W_upper_LSB * cdfLo) >> 16);
W_upper = WEBRTC_SPL_UMUL(W_upper_MSB, cdfHi);
- W_upper += WEBRTC_SPL_UMUL_RSFT16(W_upper_LSB, cdfHi);
+ W_upper += ((W_upper_LSB * cdfHi) >> 16);
/* shift interval such that it begins at zero */
W_upper -= ++W_lower;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
index b540ed5eef2..9391fb3c1d0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
@@ -17,7 +17,6 @@
#include "arith_routins.h"
-
/* Tables for piecewise linear cdf functions: y = k*x */
/* x Points for function piecewise() in Q15 */
@@ -248,7 +247,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
int16_t envCount;
uint16_t tmpARSpecQ8 = 0;
int k, i;
-
+ int offset = 0;
/* point to beginning of stream buffer */
streamPtr = streamData->stream + streamData->stream_index;
@@ -304,7 +303,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
- W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+ W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16;
if (streamVal > W_tmp)
{
@@ -313,7 +312,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
- W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+ W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16;
while (streamVal > W_tmp)
{
@@ -323,7 +322,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
- W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+ W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16;
/* error check */
if (W_lower == W_tmp) {
@@ -342,7 +341,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
cdfTmp = WebRtcIsacfix_Piecewise(WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
- W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+ W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16;
while ( !(streamVal > W_tmp) )
{
@@ -352,7 +351,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
WEBRTC_SPL_MUL_16_U16(candQ7, tmpARSpecQ8));
W_tmp = WEBRTC_SPL_UMUL_16_16(cdfTmp, W_upper_MSB);
- W_tmp += WEBRTC_SPL_UMUL_16_16_RSFT16(cdfTmp, W_upper_LSB);
+ W_tmp += ((uint32_t)cdfTmp * (uint32_t)W_upper_LSB) >> 16;
/* error check */
if (W_upper == W_tmp){
@@ -377,14 +376,27 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
* W_upper < 2^24 */
while ( !(W_upper & 0xFF000000) )
{
- /* read next byte from stream */
- if (streamData->full == 0) {
- streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8) | (*streamPtr++ & 0x00FF);
- streamData->full = 1;
+ if (streamPtr < streamData->stream + streamData->stream_size) {
+ /* read next byte from stream */
+ if (streamData->full == 0) {
+ streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8) | (*streamPtr++ & 0x00FF);
+ streamData->full = 1;
+ } else {
+ streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8) |
+ ((*streamPtr) >> 8);
+ streamData->full = 0;
+ }
} else {
- streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8) |
- WEBRTC_SPL_RSHIFT_U16(*streamPtr, 8);
- streamData->full = 0;
+ /* Intending to read outside the stream. This can happen for the last
+ * two or three bytes. It is how the algorithm is implemented. Do
+ * not read from the bit stream and insert zeros instead. */
+ streamVal = WEBRTC_SPL_LSHIFT_W32(streamVal, 8);
+ if (streamData->full == 0) {
+ offset++; // We would have incremented the pointer in this case.
+ streamData->full = 1;
+ } else {
+ streamData->full = 0;
+ }
}
W_upper = WEBRTC_SPL_LSHIFT_W32(W_upper, 8);
}
@@ -392,7 +404,7 @@ int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
envCount++;
}
- streamData->stream_index = streamPtr - streamData->stream;
+ streamData->stream_index = streamPtr + offset - streamData->stream;
streamData->W_upper = W_upper;
streamData->streamval = streamVal;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
index 88c7e1abe84..2f649324e77 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
@@ -179,6 +179,21 @@ void WebRtcIsacfix_FilterMaLoopNeon(int16_t input0,
int32_t* ptr2);
#endif
+#if defined(MIPS32_LE)
+int WebRtcIsacfix_AutocorrMIPS(int32_t* __restrict r,
+ const int16_t* __restrict x,
+ int16_t N,
+ int16_t order,
+ int16_t* __restrict scale);
+
+void WebRtcIsacfix_FilterMaLoopMIPS(int16_t input0,
+ int16_t input1,
+ int32_t input2,
+ int32_t* ptr0,
+ int32_t* ptr1,
+ int32_t* ptr2);
+#endif
+
/* Function pointers associated with the above functions. */
typedef int (*AutocorrFix)(int32_t* __restrict r,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c
index 6bccb8c8393..6bd5843cd97 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c
@@ -326,7 +326,7 @@ int16_t WebRtcIsacfix_DecodePlcImpl(int16_t *signal_out16,
for( k = 0; k < lag0; k++ )
{
corr = WEBRTC_SPL_ADD_SAT_W32( corr, WEBRTC_SPL_ABS_W32(
- WEBRTC_SPL_SUB_SAT_W16(
+ WebRtcSpl_SubSatW16(
(ISACdec_obj->plcstr_obj).lastPitchLP[k],
(ISACdec_obj->plcstr_obj).prevPitchInvIn[
FRAMESAMPLES_HALF - 2*lag0 - 10 + i + k ] ) ) );
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c
index e209c0ee59a..daf0d629993 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c
@@ -15,18 +15,21 @@
*
*/
-#include "arith_routins.h"
-#include "bandwidth_estimator.h"
-#include "codec.h"
-#include "pitch_gain_tables.h"
-#include "pitch_lag_tables.h"
-#include "entropy_coding.h"
-#include "lpc_tables.h"
-#include "lpc_masking_model.h"
-#include "pitch_estimator.h"
-#include "structs.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h"
+
+#include <assert.h>
#include <stdio.h>
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h"
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h"
+
int WebRtcIsacfix_EncodeImpl(int16_t *in,
ISACFIX_EncInst_t *ISACenc_obj,
@@ -450,12 +453,14 @@ int WebRtcIsacfix_EncodeImpl(int16_t *in,
while (stream_length < MinBytes)
{
+ assert(stream_length >= 0);
if (stream_length & 0x0001){
ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed );
ISACenc_obj->bitstr_obj.stream[ WEBRTC_SPL_RSHIFT_W16(stream_length, 1) ] |= (uint16_t)(ISACenc_obj->bitstr_seed & 0xFF);
} else {
ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed );
- ISACenc_obj->bitstr_obj.stream[ WEBRTC_SPL_RSHIFT_W16(stream_length, 1) ] = WEBRTC_SPL_LSHIFT_U16(ISACenc_obj->bitstr_seed, 8);
+ ISACenc_obj->bitstr_obj.stream[stream_length / 2] =
+ ((uint16_t)ISACenc_obj->bitstr_seed << 8);
}
stream_length++;
}
@@ -467,7 +472,8 @@ int WebRtcIsacfix_EncodeImpl(int16_t *in,
}
else {
ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] &= 0x00FF;
- ISACenc_obj->bitstr_obj.stream[usefulstr_len>>1] += WEBRTC_SPL_LSHIFT_U16((MinBytes - usefulstr_len) & 0x00FF, 8);
+ ISACenc_obj->bitstr_obj.stream[usefulstr_len >> 1] +=
+ ((uint16_t)((MinBytes - usefulstr_len) & 0x00FF) << 8);
}
}
else
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
index 28d10357245..3fefc1a5dcc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h
@@ -58,6 +58,17 @@ void WebRtcIsacfix_AllpassFilter2FixDec16Neon(
int32_t *filter_state_ch2);
#endif
+#if defined(MIPS_DSP_R1_LE)
+void WebRtcIsacfix_AllpassFilter2FixDec16MIPS(
+ int16_t *data_ch1,
+ int16_t *data_ch2,
+ const int16_t *factor_ch1,
+ const int16_t *factor_ch2,
+ const int length,
+ int32_t *filter_state_ch1,
+ int32_t *filter_state_ch2);
+#endif
+
#if defined(__cplusplus) || defined(c_plusplus)
}
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c
index 9c9d098aeef..64557e132d0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c
@@ -102,8 +102,8 @@ void WebRtcIsacfix_HighpassFilterFixDec32(int16_t *io,
#ifdef WEBRTC_ARCH_ARM_V7
{
- int tmp_coeff0 = 0;
- int tmp_coeff1 = 0;
+ register int tmp_coeff0;
+ register int tmp_coeff1;
__asm __volatile(
"ldr %[tmp_coeff0], [%[coeff]]\n\t"
"ldr %[tmp_coeff1], [%[coeff], #4]\n\t"
@@ -113,12 +113,12 @@ void WebRtcIsacfix_HighpassFilterFixDec32(int16_t *io,
"ldr %[tmp_coeff1], [%[coeff], #12]\n\t"
"smmulr %[a1], %[tmp_coeff0], %[state0]\n\t"
"smmulr %[b1], %[tmp_coeff1], %[state1]\n\t"
- :[a2]"+r"(a2),
- [b2]"+r"(b2),
- [a1]"+r"(a1),
- [b1]"+r"(b1),
- [tmp_coeff0]"+r"(tmp_coeff0),
- [tmp_coeff1]"+r"(tmp_coeff1)
+ :[a2]"=&r"(a2),
+ [b2]"=&r"(b2),
+ [a1]"=&r"(a1),
+ [b1]"=r"(b1),
+ [tmp_coeff0]"=&r"(tmp_coeff0),
+ [tmp_coeff1]"=&r"(tmp_coeff1)
:[coeff]"r"(coefficient),
[state0]"r"(state0),
[state1]"r"(state1)
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_mips.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_mips.c
new file mode 100644
index 00000000000..1887745b7c0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks_mips.c
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h"
+
+// WebRtcIsacfix_AllpassFilter2FixDec16 function optimized for MIPSDSP platform
+// Bit-exact with WebRtcIsacfix_AllpassFilter2FixDec16C from filterbanks.c
+void WebRtcIsacfix_AllpassFilter2FixDec16MIPS(
+ int16_t *data_ch1, // Input and output in channel 1, in Q0
+ int16_t *data_ch2, // Input and output in channel 2, in Q0
+ const int16_t *factor_ch1, // Scaling factor for channel 1, in Q15
+ const int16_t *factor_ch2, // Scaling factor for channel 2, in Q15
+ const int length, // Length of the data buffers
+ int32_t *filter_state_ch1, // Filter state for channel 1, in Q16
+ int32_t *filter_state_ch2) { // Filter state for channel 2, in Q16
+
+ int32_t st0_ch1, st1_ch1; // channel1 state variables
+ int32_t st0_ch2, st1_ch2; // channel2 state variables
+ int32_t f_ch10, f_ch11, f_ch20, f_ch21; // factor variables
+ int32_t r0, r1, r2, r3, r4, r5; // temporary ragister variables
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ // Load all the state and factor variables
+ "lh %[f_ch10], 0(%[factor_ch1]) \n\t"
+ "lh %[f_ch20], 0(%[factor_ch2]) \n\t"
+ "lh %[f_ch11], 2(%[factor_ch1]) \n\t"
+ "lh %[f_ch21], 2(%[factor_ch2]) \n\t"
+ "lw %[st0_ch1], 0(%[filter_state_ch1]) \n\t"
+ "lw %[st1_ch1], 4(%[filter_state_ch1]) \n\t"
+ "lw %[st0_ch2], 0(%[filter_state_ch2]) \n\t"
+ "lw %[st1_ch2], 4(%[filter_state_ch2]) \n\t"
+ // Allpass filtering loop
+ "1: \n\t"
+ "lh %[r0], 0(%[data_ch1]) \n\t"
+ "lh %[r1], 0(%[data_ch2]) \n\t"
+ "addiu %[length], %[length], -1 \n\t"
+ "mul %[r2], %[r0], %[f_ch10] \n\t"
+ "mul %[r3], %[r1], %[f_ch20] \n\t"
+ "sll %[r0], %[r0], 16 \n\t"
+ "sll %[r1], %[r1], 16 \n\t"
+ "sll %[r2], %[r2], 1 \n\t"
+ "addq_s.w %[r2], %[r2], %[st0_ch1] \n\t"
+ "sll %[r3], %[r3], 1 \n\t"
+ "addq_s.w %[r3], %[r3], %[st0_ch2] \n\t"
+ "sra %[r2], %[r2], 16 \n\t"
+ "mul %[st0_ch1], %[f_ch10], %[r2] \n\t"
+ "sra %[r3], %[r3], 16 \n\t"
+ "mul %[st0_ch2], %[f_ch20], %[r3] \n\t"
+ "mul %[r4], %[r2], %[f_ch11] \n\t"
+ "mul %[r5], %[r3], %[f_ch21] \n\t"
+ "sll %[st0_ch1], %[st0_ch1], 1 \n\t"
+ "subq_s.w %[st0_ch1], %[r0], %[st0_ch1] \n\t"
+ "sll %[st0_ch2], %[st0_ch2], 1 \n\t"
+ "subq_s.w %[st0_ch2], %[r1], %[st0_ch2] \n\t"
+ "sll %[r4], %[r4], 1 \n\t"
+ "addq_s.w %[r4], %[r4], %[st1_ch1] \n\t"
+ "sll %[r5], %[r5], 1 \n\t"
+ "addq_s.w %[r5], %[r5], %[st1_ch2] \n\t"
+ "sra %[r4], %[r4], 16 \n\t"
+ "mul %[r0], %[r4], %[f_ch11] \n\t"
+ "sra %[r5], %[r5], 16 \n\t"
+ "mul %[r1], %[r5], %[f_ch21] \n\t"
+ "sh %[r4], 0(%[data_ch1]) \n\t"
+ "sh %[r5], 0(%[data_ch2]) \n\t"
+ "addiu %[data_ch1], %[data_ch1], 2 \n\t"
+ "sll %[r2], %[r2], 16 \n\t"
+ "sll %[r0], %[r0], 1 \n\t"
+ "subq_s.w %[st1_ch1], %[r2], %[r0] \n\t"
+ "sll %[r3], %[r3], 16 \n\t"
+ "sll %[r1], %[r1], 1 \n\t"
+ "subq_s.w %[st1_ch2], %[r3], %[r1] \n\t"
+ "bgtz %[length], 1b \n\t"
+ " addiu %[data_ch2], %[data_ch2], 2 \n\t"
+ // Store channel states
+ "sw %[st0_ch1], 0(%[filter_state_ch1]) \n\t"
+ "sw %[st1_ch1], 4(%[filter_state_ch1]) \n\t"
+ "sw %[st0_ch2], 0(%[filter_state_ch2]) \n\t"
+ "sw %[st1_ch2], 4(%[filter_state_ch2]) \n\t"
+ ".set pop \n\t"
+ : [f_ch10] "=&r" (f_ch10), [f_ch20] "=&r" (f_ch20),
+ [f_ch11] "=&r" (f_ch11), [f_ch21] "=&r" (f_ch21),
+ [st0_ch1] "=&r" (st0_ch1), [st1_ch1] "=&r" (st1_ch1),
+ [st0_ch2] "=&r" (st0_ch2), [st1_ch2] "=&r" (st1_ch2),
+ [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2),
+ [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5)
+ : [factor_ch1] "r" (factor_ch1), [factor_ch2] "r" (factor_ch2),
+ [filter_state_ch1] "r" (filter_state_ch1),
+ [filter_state_ch2] "r" (filter_state_ch2),
+ [data_ch1] "r" (data_ch1), [data_ch2] "r" (data_ch2),
+ [length] "r" (length)
+ : "memory", "hi", "lo"
+ );
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_mips.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_mips.c
new file mode 100644
index 00000000000..056dc275d39
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_mips.c
@@ -0,0 +1,365 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h"
+
+// MIPS optimized implementation of the Autocorrelation function in fixed point.
+// NOTE: This differs from the SPL library version in how it scales the signal.
+int WebRtcIsacfix_AutocorrMIPS(int32_t* __restrict r,
+ const int16_t* __restrict x,
+ int16_t N,
+ int16_t order,
+ int16_t* __restrict scale) {
+ int i = 0;
+ int16_t scaling = 0;
+ int16_t* in = (int16_t*)x;
+ int loop_size = (int)(N >> 3);
+ int count = (int)(N & 7);
+ // Declare temporary variables used as register values.
+ int32_t r0, r1, r2, r3;
+#if !defined(MIPS_DSP_R2_LE)
+ // For non-DSPR2 optimizations 4 more registers are used.
+ int32_t r4, r5, r6, r7;
+#endif
+
+ // Calculate r[0] and scaling needed.
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "mult $0, $0 \n\t"
+ // Loop is unrolled 8 times, set accumulator to zero in branch delay slot.
+ "beqz %[loop_size], 2f \n\t"
+ " mult $0, $0 \n\t"
+ "1: \n\t"
+ // Load 8 samples per loop iteration.
+#if defined(MIPS_DSP_R2_LE)
+ "ulw %[r0], 0(%[in]) \n\t"
+ "ulw %[r1], 4(%[in]) \n\t"
+ "ulw %[r2], 8(%[in]) \n\t"
+ "ulw %[r3], 12(%[in]) \n\t"
+#else
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 2(%[in]) \n\t"
+ "lh %[r2], 4(%[in]) \n\t"
+ "lh %[r3], 6(%[in]) \n\t"
+ "lh %[r4], 8(%[in]) \n\t"
+ "lh %[r5], 10(%[in]) \n\t"
+ "lh %[r6], 12(%[in]) \n\t"
+ "lh %[r7], 14(%[in]) \n\t"
+#endif
+ "addiu %[loop_size], %[loop_size], -1 \n\t"
+ // Multiply and accumulate.
+#if defined(MIPS_DSP_R2_LE)
+ "dpa.w.ph $ac0, %[r0], %[r0] \n\t"
+ "dpa.w.ph $ac0, %[r1], %[r1] \n\t"
+ "dpa.w.ph $ac0, %[r2], %[r2] \n\t"
+ "dpa.w.ph $ac0, %[r3], %[r3] \n\t"
+#else
+ "madd %[r0], %[r0] \n\t"
+ "madd %[r1], %[r1] \n\t"
+ "madd %[r2], %[r2] \n\t"
+ "madd %[r3], %[r3] \n\t"
+ "madd %[r4], %[r4] \n\t"
+ "madd %[r5], %[r5] \n\t"
+ "madd %[r6], %[r6] \n\t"
+ "madd %[r7], %[r7] \n\t"
+#endif
+ "bnez %[loop_size], 1b \n\t"
+ " addiu %[in], %[in], 16 \n\t"
+ "2: \n\t"
+ "beqz %[count], 4f \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ " extr.w %[r0], $ac0, 31 \n\t"
+#else
+ " mfhi %[r2] \n\t"
+#endif
+ // Process remaining samples (if any).
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "madd %[r0], %[r0] \n\t"
+ "bnez %[count], 3b \n\t"
+ " addiu %[in], %[in], 2 \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "extr.w %[r0], $ac0, 31 \n\t"
+#else
+ "mfhi %[r2] \n\t"
+#endif
+ "4: \n\t"
+#if !defined(MIPS_DSP_R1_LE)
+ "mflo %[r3] \n\t"
+ "sll %[r0], %[r2], 1 \n\t"
+ "srl %[r1], %[r3], 31 \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+#endif
+ // Calculate scaling (the number of bits to shift).
+ "clz %[r1], %[r0] \n\t"
+ "addiu %[r1], %[r1], -32 \n\t"
+ "subu %[scaling], $0, %[r1] \n\t"
+ "slti %[r1], %[r0], 0x1 \n\t"
+ "movn %[scaling], $0, %[r1] \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "extrv.w %[r0], $ac0, %[scaling] \n\t"
+ "mfhi %[r2], $ac0 \n\t"
+#else
+ "addiu %[r1], %[scaling], -32 \n\t"
+ "subu %[r1], $0, %[r1] \n\t"
+ "sllv %[r1], %[r2], %[r1] \n\t"
+ "srlv %[r0], %[r3], %[scaling] \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+#endif
+ "slti %[r1], %[scaling], 32 \n\t"
+ "movz %[r0], %[r2], %[r1] \n\t"
+ ".set pop \n\t"
+ : [loop_size] "+r" (loop_size), [in] "+r" (in), [r0] "=&r" (r0),
+ [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3),
+#if !defined(MIPS_DSP_R2_LE)
+ [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7),
+#endif
+ [count] "+r" (count), [scaling] "=r" (scaling)
+ : [N] "r" (N)
+ : "memory", "hi", "lo"
+ );
+ r[0] = r0;
+
+ // Correlation calculation is divided into 3 cases depending on the scaling
+ // value (different accumulator manipulation needed). Three slightly different
+ // loops are written in order to avoid branches inside the loop.
+ if (scaling == 0) {
+ // In this case, the result will be in low part of the accumulator.
+ for (i = 1; i < order + 1; i++) {
+ in = (int16_t*)x;
+ int16_t* in1 = (int16_t*)x + i;
+ count = N - i;
+ loop_size = (count) >> 2;
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "mult $0, $0 \n\t"
+ "beqz %[loop_size], 2f \n\t"
+ " andi %[count], %[count], 0x3 \n\t"
+ // Loop processing 4 pairs of samples per iteration.
+ "1: \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "ulw %[r0], 0(%[in]) \n\t"
+ "ulw %[r1], 0(%[in1]) \n\t"
+ "ulw %[r2], 4(%[in]) \n\t"
+ "ulw %[r3], 4(%[in1]) \n\t"
+#else
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "lh %[r2], 2(%[in]) \n\t"
+ "lh %[r3], 2(%[in1]) \n\t"
+ "lh %[r4], 4(%[in]) \n\t"
+ "lh %[r5], 4(%[in1]) \n\t"
+ "lh %[r6], 6(%[in]) \n\t"
+ "lh %[r7], 6(%[in1]) \n\t"
+#endif
+ "addiu %[loop_size], %[loop_size], -1 \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "dpa.w.ph $ac0, %[r0], %[r1] \n\t"
+ "dpa.w.ph $ac0, %[r2], %[r3] \n\t"
+#else
+ "madd %[r0], %[r1] \n\t"
+ "madd %[r2], %[r3] \n\t"
+ "madd %[r4], %[r5] \n\t"
+ "madd %[r6], %[r7] \n\t"
+#endif
+ "addiu %[in], %[in], 8 \n\t"
+ "bnez %[loop_size], 1b \n\t"
+ " addiu %[in1], %[in1], 8 \n\t"
+ "2: \n\t"
+ "beqz %[count], 4f \n\t"
+ " mflo %[r0] \n\t"
+ // Process remaining samples (if any).
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "madd %[r0], %[r1] \n\t"
+ "bnez %[count], 3b \n\t"
+ " addiu %[in1], %[in1], 2 \n\t"
+ "mflo %[r0] \n\t"
+ "4: \n\t"
+ ".set pop \n\t"
+ : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1),
+#if !defined(MIPS_DSP_R2_LE)
+ [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7),
+#endif
+ [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3),
+ [count] "+r" (count)
+ :
+ : "memory", "hi", "lo"
+ );
+ r[i] = r0;
+ }
+ } else if (scaling == 32) {
+ // In this case, the result will be high part of the accumulator.
+ for (i = 1; i < order + 1; i++) {
+ in = (int16_t*)x;
+ int16_t* in1 = (int16_t*)x + i;
+ count = N - i;
+ loop_size = (count) >> 2;
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "mult $0, $0 \n\t"
+ "beqz %[loop_size], 2f \n\t"
+ " andi %[count], %[count], 0x3 \n\t"
+ // Loop processing 4 pairs of samples per iteration.
+ "1: \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "ulw %[r0], 0(%[in]) \n\t"
+ "ulw %[r1], 0(%[in1]) \n\t"
+ "ulw %[r2], 4(%[in]) \n\t"
+ "ulw %[r3], 4(%[in1]) \n\t"
+#else
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "lh %[r2], 2(%[in]) \n\t"
+ "lh %[r3], 2(%[in1]) \n\t"
+ "lh %[r4], 4(%[in]) \n\t"
+ "lh %[r5], 4(%[in1]) \n\t"
+ "lh %[r6], 6(%[in]) \n\t"
+ "lh %[r7], 6(%[in1]) \n\t"
+#endif
+ "addiu %[loop_size], %[loop_size], -1 \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "dpa.w.ph $ac0, %[r0], %[r1] \n\t"
+ "dpa.w.ph $ac0, %[r2], %[r3] \n\t"
+#else
+ "madd %[r0], %[r1] \n\t"
+ "madd %[r2], %[r3] \n\t"
+ "madd %[r4], %[r5] \n\t"
+ "madd %[r6], %[r7] \n\t"
+#endif
+ "addiu %[in], %[in], 8 \n\t"
+ "bnez %[loop_size], 1b \n\t"
+ " addiu %[in1], %[in1], 8 \n\t"
+ "2: \n\t"
+ "beqz %[count], 4f \n\t"
+ " mfhi %[r0] \n\t"
+ // Process remaining samples (if any).
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "madd %[r0], %[r1] \n\t"
+ "bnez %[count], 3b \n\t"
+ " addiu %[in1], %[in1], 2 \n\t"
+ "mfhi %[r0] \n\t"
+ "4: \n\t"
+ ".set pop \n\t"
+ : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1),
+#if !defined(MIPS_DSP_R2_LE)
+ [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7),
+#endif
+ [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3),
+ [count] "+r" (count)
+ :
+ : "memory", "hi", "lo"
+ );
+ r[i] = r0;
+ }
+ } else {
+ // In this case, the result is obtained by combining low and high parts
+ // of the accumulator.
+#if !defined(MIPS_DSP_R1_LE)
+ int32_t tmp_shift = 32 - scaling;
+#endif
+ for (i = 1; i < order + 1; i++) {
+ in = (int16_t*)x;
+ int16_t* in1 = (int16_t*)x + i;
+ count = N - i;
+ loop_size = (count) >> 2;
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "mult $0, $0 \n\t"
+ "beqz %[loop_size], 2f \n\t"
+ " andi %[count], %[count], 0x3 \n\t"
+ "1: \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "ulw %[r0], 0(%[in]) \n\t"
+ "ulw %[r1], 0(%[in1]) \n\t"
+ "ulw %[r2], 4(%[in]) \n\t"
+ "ulw %[r3], 4(%[in1]) \n\t"
+#else
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "lh %[r2], 2(%[in]) \n\t"
+ "lh %[r3], 2(%[in1]) \n\t"
+ "lh %[r4], 4(%[in]) \n\t"
+ "lh %[r5], 4(%[in1]) \n\t"
+ "lh %[r6], 6(%[in]) \n\t"
+ "lh %[r7], 6(%[in1]) \n\t"
+#endif
+ "addiu %[loop_size], %[loop_size], -1 \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "dpa.w.ph $ac0, %[r0], %[r1] \n\t"
+ "dpa.w.ph $ac0, %[r2], %[r3] \n\t"
+#else
+ "madd %[r0], %[r1] \n\t"
+ "madd %[r2], %[r3] \n\t"
+ "madd %[r4], %[r5] \n\t"
+ "madd %[r6], %[r7] \n\t"
+#endif
+ "addiu %[in], %[in], 8 \n\t"
+ "bnez %[loop_size], 1b \n\t"
+ " addiu %[in1], %[in1], 8 \n\t"
+ "2: \n\t"
+ "beqz %[count], 4f \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ " extrv.w %[r0], $ac0, %[scaling] \n\t"
+#else
+ " mfhi %[r0] \n\t"
+#endif
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 0(%[in1]) \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "madd %[r0], %[r1] \n\t"
+ "bnez %[count], 3b \n\t"
+ " addiu %[in1], %[in1], 2 \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "extrv.w %[r0], $ac0, %[scaling] \n\t"
+#else
+ "mfhi %[r0] \n\t"
+#endif
+ "4: \n\t"
+#if !defined(MIPS_DSP_R1_LE)
+ "mflo %[r1] \n\t"
+ "sllv %[r0], %[r0], %[tmp_shift] \n\t"
+ "srlv %[r1], %[r1], %[scaling] \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+#endif
+ ".set pop \n\t"
+ : [loop_size] "+r" (loop_size), [in] "+r" (in), [in1] "+r" (in1),
+#if !defined(MIPS_DSP_R2_LE)
+ [r4] "=&r" (r4), [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7),
+#endif
+ [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3),
+ [count] "+r" (count)
+ : [scaling] "r" (scaling)
+#if !defined(MIPS_DSP_R1_LE)
+ , [tmp_shift] "r" (tmp_shift)
+#endif
+ : "memory", "hi", "lo"
+ );
+ r[i] = r0;
+ }
+ }
+ *scale = scaling;
+
+ return (order + 1);
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
index 8baa30738f6..7635908094e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
@@ -179,7 +179,7 @@ int16_t WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst)
}
/****************************************************************************
- * WebRtcAecm_InitNeon(...)
+ * WebRtcIsacfix_InitNeon(...)
*
* This function initializes function pointers for ARM Neon platform.
*/
@@ -200,6 +200,23 @@ static void WebRtcIsacfix_InitNeon(void) {
#endif
/****************************************************************************
+ * WebRtcIsacfix_InitMIPS(...)
+ *
+ * This function initializes function pointers for MIPS platform.
+ */
+
+#if defined(MIPS32_LE)
+static void WebRtcIsacfix_InitMIPS(void) {
+ WebRtcIsacfix_AutocorrFix = WebRtcIsacfix_AutocorrMIPS;
+ WebRtcIsacfix_FilterMaLoopFix = WebRtcIsacfix_FilterMaLoopMIPS;
+#if defined(MIPS_DSP_R1_LE)
+ WebRtcIsacfix_AllpassFilter2FixDec16 =
+ WebRtcIsacfix_AllpassFilter2FixDec16MIPS;
+#endif
+}
+#endif
+
+/****************************************************************************
* WebRtcIsacfix_EncoderInit(...)
*
* This function initializes a ISAC instance prior to the encoder calls.
@@ -296,6 +313,10 @@ int16_t WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct *ISAC_main_inst,
WebRtcIsacfix_InitNeon();
#endif
+#if defined(MIPS32_LE)
+ WebRtcIsacfix_InitMIPS();
+#endif
+
return statusInit;
}
@@ -587,15 +608,11 @@ int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst,
{
ISACFIX_SubStruct *ISAC_inst;
Bitstr_dec streamdata;
- uint16_t partOfStream[5];
#ifndef WEBRTC_ARCH_BIG_ENDIAN
int k;
#endif
int16_t err;
- /* Set stream pointer to point at partOfStream */
- streamdata.stream = (uint16_t *)partOfStream;
-
/* typecast pointer to real structure */
ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
@@ -675,15 +692,11 @@ int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
{
ISACFIX_SubStruct *ISAC_inst;
Bitstr_dec streamdata;
- uint16_t partOfStream[5];
#ifndef WEBRTC_ARCH_BIG_ENDIAN
int k;
#endif
int16_t err;
- /* Set stream pointer to point at partOfStream */
- streamdata.stream = (uint16_t *)partOfStream;
-
/* typecast pointer to real structure */
ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
@@ -790,7 +803,7 @@ int16_t WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
return -1;
}
- (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (uint16_t *)encoded;
+ ISAC_inst->ISACdec_obj.bitstr_obj.stream_size = (len + 1) >> 1;
/* convert bitstream from int16_t to bytes */
#ifndef WEBRTC_ARCH_BIG_ENDIAN
@@ -891,7 +904,7 @@ int16_t WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
return -1;
}
- (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (uint16_t *)encoded;
+ ISAC_inst->ISACdec_obj.bitstr_obj.stream_size = (len + 1) >> 1;
/* convert bitstream from int16_t to bytes */
#ifndef WEBRTC_ARCH_BIG_ENDIAN
@@ -1266,15 +1279,11 @@ int16_t WebRtcIsacfix_ReadFrameLen(const int16_t* encoded,
int16_t* frameLength)
{
Bitstr_dec streamdata;
- uint16_t partOfStream[5];
#ifndef WEBRTC_ARCH_BIG_ENDIAN
int k;
#endif
int16_t err;
- /* Set stream pointer to point at partOfStream */
- streamdata.stream = (uint16_t *)partOfStream;
-
streamdata.W_upper = 0xFFFFFFFF;
streamdata.streamval = 0;
streamdata.stream_index = 0;
@@ -1315,15 +1324,11 @@ int16_t WebRtcIsacfix_ReadBwIndex(const int16_t* encoded,
int16_t* rateIndex)
{
Bitstr_dec streamdata;
- uint16_t partOfStream[5];
#ifndef WEBRTC_ARCH_BIG_ENDIAN
int k;
#endif
int16_t err;
- /* Set stream pointer to point at partOfStream */
- streamdata.stream = (uint16_t *)partOfStream;
-
streamdata.W_upper = 0xFFFFFFFF;
streamdata.streamval = 0;
streamdata.stream_index = 0;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi
index 87c98606a11..a18a803d659 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.gypi
@@ -85,6 +85,30 @@
'pitch_filter_c.c',
],
}],
+ ['target_arch=="mipsel"', {
+ 'sources': [
+ 'filters_mips.c',
+ 'lattice_mips.c',
+ ],
+ 'sources!': [
+ 'lattice_c.c',
+ ],
+ 'conditions': [
+ ['mips_dsp_rev>0', {
+ 'sources': [
+ 'filterbanks_mips.c',
+ ],
+ }],
+ ['mips_dsp_rev>1', {
+ 'sources': [
+ 'pitch_filter_mips.c',
+ ],
+ 'sources!': [
+ 'pitch_filter_c.c',
+ ],
+ }],
+ ],
+ }],
],
},
],
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c
new file mode 100644
index 00000000000..c596922168e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_mips.c
@@ -0,0 +1,327 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/settings.h"
+#include "webrtc/typedefs.h"
+
+// Filter ar_g_Q0[] and ar_f_Q0[] through an AR filter with coefficients
+// cth_Q15[] and sth_Q15[].
+void WebRtcIsacfix_FilterArLoop(int16_t* ar_g_Q0, // Input samples
+ int16_t* ar_f_Q0, // Input samples
+ int16_t* cth_Q15, // Filter coefficients
+ int16_t* sth_Q15, // Filter coefficients
+ int16_t order_coef) { // order of the filter
+ int n = 0;
+
+ for (n = 0; n < HALF_SUBFRAMELEN - 1; n++) {
+ int count = order_coef - 1;
+ int offset;
+#if !defined(MIPS_DSP_R1_LE)
+ int16_t* tmp_cth;
+ int16_t* tmp_sth;
+ int16_t* tmp_arg;
+ int32_t max_q16 = 0x7fff;
+ int32_t min_q16 = 0xffff8000;
+#endif
+ // Declare variables used as temporary registers.
+ int32_t r0, r1, r2, t0, t1, t2, t_ar;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "bltz %[count], 2f \n\t"
+ " lh %[t_ar], 0(%[tmp]) \n\t"
+ // Inner loop
+ "1: \n\t"
+ "sll %[offset], %[count], 1 \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "lhx %[r0], %[offset](%[cth_Q15]) \n\t"
+ "lhx %[r1], %[offset](%[sth_Q15]) \n\t"
+ "lhx %[r2], %[offset](%[ar_g_Q0]) \n\t"
+#else
+ "addu %[tmp_cth], %[cth_Q15], %[offset] \n\t"
+ "addu %[tmp_sth], %[sth_Q15], %[offset] \n\t"
+ "addu %[tmp_arg], %[ar_g_Q0], %[offset] \n\t"
+ "lh %[r0], 0(%[tmp_cth]) \n\t"
+ "lh %[r1], 0(%[tmp_sth]) \n\t"
+ "lh %[r2], 0(%[tmp_arg]) \n\t"
+#endif
+ "mul %[t0], %[r0], %[t_ar] \n\t"
+ "mul %[t1], %[r1], %[t_ar] \n\t"
+ "mul %[t2], %[r1], %[r2] \n\t"
+ "mul %[r0], %[r0], %[r2] \n\t"
+ "subu %[t0], %[t0], %[t2] \n\t"
+ "addu %[t1], %[t1], %[r0] \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "shra_r.w %[t1], %[t1], 15 \n\t"
+ "shra_r.w %[t0], %[t0], 15 \n\t"
+#else
+ "addiu %[t1], %[t1], 0x4000 \n\t"
+ "sra %[t1], %[t1], 15 \n\t"
+ "addiu %[t0], %[t0], 0x4000 \n\t"
+ "sra %[t0], %[t0], 15 \n\t"
+#endif
+ "addiu %[offset], %[offset], 2 \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "shll_s.w %[t1], %[t1], 16 \n\t"
+ "shll_s.w %[t_ar], %[t0], 16 \n\t"
+#else
+ "slt %[r0], %[t1], %[max_q16] \n\t"
+ "slt %[r1], %[t0], %[max_q16] \n\t"
+ "movz %[t1], %[max_q16], %[r0] \n\t"
+ "movz %[t0], %[max_q16], %[r1] \n\t"
+#endif
+ "addu %[offset], %[offset], %[ar_g_Q0] \n\t"
+#if defined(MIPS_DSP_R1_LE)
+ "sra %[t1], %[t1], 16 \n\t"
+ "sra %[t_ar], %[t_ar], 16 \n\t"
+#else
+ "slt %[r0], %[t1], %[min_q16] \n\t"
+ "slt %[r1], %[t0], %[min_q16] \n\t"
+ "movn %[t1], %[min_q16], %[r0] \n\t"
+ "movn %[t0], %[min_q16], %[r1] \n\t"
+ "addu %[t_ar], $zero, %[t0] \n\t"
+#endif
+ "sh %[t1], 0(%[offset]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[count], %[count], -1 \n\t"
+ "2: \n\t"
+ "sh %[t_ar], 0(%[tmp]) \n\t"
+ "sh %[t_ar], 0(%[ar_g_Q0]) \n\t"
+ ".set pop \n\t"
+ : [t_ar] "=&r" (t_ar), [count] "+r" (count), [offset] "=&r" (offset),
+ [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2), [t0] "=&r" (t0),
+#if !defined(MIPS_DSP_R1_LE)
+ [tmp_cth] "=&r" (tmp_cth), [tmp_sth] "=&r" (tmp_sth),
+ [tmp_arg] "=&r" (tmp_arg),
+#endif
+ [t1] "=&r" (t1), [t2] "=&r" (t2)
+ : [tmp] "r" (&ar_f_Q0[n+1]), [cth_Q15] "r" (cth_Q15),
+#if !defined(MIPS_DSP_R1_LE)
+ [max_q16] "r" (max_q16), [min_q16] "r" (min_q16),
+#endif
+ [sth_Q15] "r" (sth_Q15), [ar_g_Q0] "r" (ar_g_Q0)
+ : "memory", "hi", "lo"
+ );
+ }
+}
+
+// MIPS optimization of the inner loop used for function
+// WebRtcIsacfix_NormLatticeFilterMa(). It does:
+//
+// for 0 <= n < HALF_SUBFRAMELEN - 1:
+// *ptr2 = input2 * ((*ptr2) + input0 * (*ptr0));
+// *ptr1 = input1 * (*ptr0) + input0 * (*ptr2);
+//
+// Note, function WebRtcIsacfix_FilterMaLoopMIPS and WebRtcIsacfix_FilterMaLoopC
+// are not bit-exact. The accuracy of the MIPS function is the same or better.
+void WebRtcIsacfix_FilterMaLoopMIPS(int16_t input0, // Filter coefficient
+ int16_t input1, // Filter coefficient
+ int32_t input2, // Inverse coeff (1/input1)
+ int32_t* ptr0, // Sample buffer
+ int32_t* ptr1, // Sample buffer
+ int32_t* ptr2) { // Sample buffer
+#if defined(MIPS_DSP_R2_LE)
+ // MIPS DSPR2 version. 4 available accumulators allows loop unrolling 4 times.
+ // This variant is not bit-exact with WebRtcIsacfix_FilterMaLoopC, since we
+ // are exploiting 64-bit accumulators. The accuracy of the MIPS DSPR2 function
+ // is the same or better.
+ int n = (HALF_SUBFRAMELEN - 1) >> 2;
+ int m = (HALF_SUBFRAMELEN - 1) & 3;
+
+ int r0, r1, r2, r3;
+ int t0, t1, t2, t3;
+ int s0, s1, s2, s3;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "1: \n\t"
+ "lw %[r0], 0(%[ptr0]) \n\t"
+ "lw %[r1], 4(%[ptr0]) \n\t"
+ "lw %[r2], 8(%[ptr0]) \n\t"
+ "lw %[r3], 12(%[ptr0]) \n\t"
+ "mult $ac0, %[r0], %[input0] \n\t"
+ "mult $ac1, %[r1], %[input0] \n\t"
+ "mult $ac2, %[r2], %[input0] \n\t"
+ "mult $ac3, %[r3], %[input0] \n\t"
+ "lw %[t0], 0(%[ptr2]) \n\t"
+ "extr_rs.w %[s0], $ac0, 15 \n\t"
+ "extr_rs.w %[s1], $ac1, 15 \n\t"
+ "extr_rs.w %[s2], $ac2, 15 \n\t"
+ "extr_rs.w %[s3], $ac3, 15 \n\t"
+ "lw %[t1], 4(%[ptr2]) \n\t"
+ "lw %[t2], 8(%[ptr2]) \n\t"
+ "lw %[t3], 12(%[ptr2]) \n\t"
+ "addu %[t0], %[t0], %[s0] \n\t"
+ "addu %[t1], %[t1], %[s1] \n\t"
+ "addu %[t2], %[t2], %[s2] \n\t"
+ "addu %[t3], %[t3], %[s3] \n\t"
+ "mult $ac0, %[t0], %[input2] \n\t"
+ "mult $ac1, %[t1], %[input2] \n\t"
+ "mult $ac2, %[t2], %[input2] \n\t"
+ "mult $ac3, %[t3], %[input2] \n\t"
+ "addiu %[ptr0], %[ptr0], 16 \n\t"
+ "extr_rs.w %[t0], $ac0, 16 \n\t"
+ "extr_rs.w %[t1], $ac1, 16 \n\t"
+ "extr_rs.w %[t2], $ac2, 16 \n\t"
+ "extr_rs.w %[t3], $ac3, 16 \n\t"
+ "addiu %[n], %[n], -1 \n\t"
+ "mult $ac0, %[r0], %[input1] \n\t"
+ "mult $ac1, %[r1], %[input1] \n\t"
+ "mult $ac2, %[r2], %[input1] \n\t"
+ "mult $ac3, %[r3], %[input1] \n\t"
+ "sw %[t0], 0(%[ptr2]) \n\t"
+ "extr_rs.w %[s0], $ac0, 15 \n\t"
+ "extr_rs.w %[s1], $ac1, 15 \n\t"
+ "extr_rs.w %[s2], $ac2, 15 \n\t"
+ "extr_rs.w %[s3], $ac3, 15 \n\t"
+ "sw %[t1], 4(%[ptr2]) \n\t"
+ "sw %[t2], 8(%[ptr2]) \n\t"
+ "sw %[t3], 12(%[ptr2]) \n\t"
+ "mult $ac0, %[t0], %[input0] \n\t"
+ "mult $ac1, %[t1], %[input0] \n\t"
+ "mult $ac2, %[t2], %[input0] \n\t"
+ "mult $ac3, %[t3], %[input0] \n\t"
+ "addiu %[ptr2], %[ptr2], 16 \n\t"
+ "extr_rs.w %[t0], $ac0, 15 \n\t"
+ "extr_rs.w %[t1], $ac1, 15 \n\t"
+ "extr_rs.w %[t2], $ac2, 15 \n\t"
+ "extr_rs.w %[t3], $ac3, 15 \n\t"
+ "addu %[t0], %[t0], %[s0] \n\t"
+ "addu %[t1], %[t1], %[s1] \n\t"
+ "addu %[t2], %[t2], %[s2] \n\t"
+ "addu %[t3], %[t3], %[s3] \n\t"
+ "sw %[t0], 0(%[ptr1]) \n\t"
+ "sw %[t1], 4(%[ptr1]) \n\t"
+ "sw %[t2], 8(%[ptr1]) \n\t"
+ "sw %[t3], 12(%[ptr1]) \n\t"
+ "bgtz %[n], 1b \n\t"
+ " addiu %[ptr1], %[ptr1], 16 \n\t"
+ "beq %[m], %0, 3f \n\t"
+ " nop \n\t"
+ "2: \n\t"
+ "lw %[r0], 0(%[ptr0]) \n\t"
+ "lw %[t0], 0(%[ptr2]) \n\t"
+ "addiu %[ptr0], %[ptr0], 4 \n\t"
+ "mult $ac0, %[r0], %[input0] \n\t"
+ "mult $ac1, %[r0], %[input1] \n\t"
+ "extr_rs.w %[r1], $ac0, 15 \n\t"
+ "extr_rs.w %[t1], $ac1, 15 \n\t"
+ "addu %[t0], %[t0], %[r1] \n\t"
+ "mult $ac0, %[t0], %[input2] \n\t"
+ "extr_rs.w %[t0], $ac0, 16 \n\t"
+ "sw %[t0], 0(%[ptr2]) \n\t"
+ "mult $ac0, %[t0], %[input0] \n\t"
+ "addiu %[ptr2], %[ptr2], 4 \n\t"
+ "addiu %[m], %[m], -1 \n\t"
+ "extr_rs.w %[t0], $ac0, 15 \n\t"
+ "addu %[t0], %[t0], %[t1] \n\t"
+ "sw %[t0], 0(%[ptr1]) \n\t"
+ "bgtz %[m], 2b \n\t"
+ " addiu %[ptr1], %[ptr1], 4 \n\t"
+ "3: \n\t"
+ ".set pop \n\t"
+ : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2),
+ [r3] "=&r" (r3), [t0] "=&r" (t0), [t1] "=&r" (t1),
+ [t2] "=&r" (t2), [t3] "=&r" (t3), [s0] "=&r" (s0),
+ [s1] "=&r" (s1), [s2] "=&r" (s2), [s3] "=&r" (s3),
+ [ptr0] "+r" (ptr0), [ptr1] "+r" (ptr1), [m] "+r" (m),
+ [ptr2] "+r" (ptr2), [n] "+r" (n)
+ : [input0] "r" (input0), [input1] "r" (input1),
+ [input2] "r" (input2)
+ : "memory", "hi", "lo", "$ac1hi", "$ac1lo", "$ac2hi",
+ "$ac2lo", "$ac3hi", "$ac3lo"
+ );
+#else
+ // Non-DSPR2 version of the function. Avoiding the accumulator usage due to
+ // large latencies. This variant is bit-exact with C code.
+ int n = HALF_SUBFRAMELEN - 1;
+ int32_t t16a, t16b;
+ int32_t r0, r1, r2, r3, r4;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "sra %[t16a], %[input2], 16 \n\t"
+ "andi %[t16b], %[input2], 0xFFFF \n\t"
+#if defined(MIPS32R2_LE)
+ "seh %[t16b], %[t16b] \n\t"
+ "seh %[input0], %[input0] \n\t"
+ "seh %[input1], %[input1] \n\t"
+#else
+ "sll %[t16b], %[t16b], 16 \n\t"
+ "sra %[t16b], %[t16b], 16 \n\t"
+ "sll %[input0], %[input0], 16 \n\t"
+ "sra %[input0], %[input0], 16 \n\t"
+ "sll %[input1], %[input1], 16 \n\t"
+ "sra %[input1], %[input1], 16 \n\t"
+#endif
+ "addiu %[r0], %[t16a], 1 \n\t"
+ "slt %[r1], %[t16b], $zero \n\t"
+ "movn %[t16a], %[r0], %[r1] \n\t"
+ "1: \n\t"
+ "lw %[r0], 0(%[ptr0]) \n\t"
+ "lw %[r1], 0(%[ptr2]) \n\t"
+ "addiu %[ptr0], %[ptr0], 4 \n\t"
+ "sra %[r2], %[r0], 16 \n\t"
+ "andi %[r0], %[r0], 0xFFFF \n\t"
+ "mul %[r3], %[r2], %[input0] \n\t"
+ "mul %[r4], %[r0], %[input0] \n\t"
+ "mul %[r2], %[r2], %[input1] \n\t"
+ "mul %[r0], %[r0], %[input1] \n\t"
+ "addiu %[ptr2], %[ptr2], 4 \n\t"
+ "sll %[r3], %[r3], 1 \n\t"
+ "sra %[r4], %[r4], 1 \n\t"
+ "addiu %[r4], %[r4], 0x2000 \n\t"
+ "sra %[r4], %[r4], 14 \n\t"
+ "addu %[r3], %[r3], %[r4] \n\t"
+ "addu %[r1], %[r1], %[r3] \n\t"
+ "sra %[r3], %[r1], 16 \n\t"
+ "andi %[r4], %[r1], 0xFFFF \n\t"
+ "sra %[r4], %[r4], 1 \n\t"
+ "mul %[r1], %[r1], %[t16a] \n\t"
+ "mul %[r3], %[r3], %[t16b] \n\t"
+ "mul %[r4], %[r4], %[t16b] \n\t"
+ "sll %[r2], %[r2], 1 \n\t"
+ "sra %[r0], %[r0], 1 \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "addu %[r0], %[r0], %[r2] \n\t"
+ "addiu %[n], %[n], -1 \n\t"
+ "addu %[r1], %[r1], %[r3] \n\t"
+ "addiu %[r4], %[r4], 0x4000 \n\t"
+ "sra %[r4], %[r4], 15 \n\t"
+ "addu %[r1], %[r1], %[r4] \n\t"
+ "sra %[r2], %[r1], 16 \n\t"
+ "andi %[r3], %[r1], 0xFFFF \n\t"
+ "mul %[r3], %[r3], %[input0] \n\t"
+ "mul %[r2], %[r2], %[input0] \n\t"
+ "sw %[r1], -4(%[ptr2]) \n\t"
+ "sra %[r3], %[r3], 1 \n\t"
+ "addiu %[r3], %[r3], 0x2000 \n\t"
+ "sra %[r3], %[r3], 14 \n\t"
+ "addu %[r0], %[r0], %[r3] \n\t"
+ "sll %[r2], %[r2], 1 \n\t"
+ "addu %[r0], %[r0], %[r2] \n\t"
+ "sw %[r0], 0(%[ptr1]) \n\t"
+ "bgtz %[n], 1b \n\t"
+ " addiu %[ptr1], %[ptr1], 4 \n\t"
+ ".set pop \n\t"
+ : [t16a] "=&r" (t16a), [t16b] "=&r" (t16b), [r0] "=&r" (r0),
+ [r1] "=&r" (r1), [r2] "=&r" (r2), [r3] "=&r" (r3),
+ [r4] "=&r" (r4), [ptr0] "+r" (ptr0), [ptr1] "+r" (ptr1),
+ [ptr2] "+r" (ptr2), [n] "+r" (n)
+ : [input0] "r" (input0), [input1] "r" (input1),
+ [input2] "r" (input2)
+ : "hi", "lo", "memory"
+ );
+#endif
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c
index 0dc8174399e..deba0d5e29f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c
@@ -834,13 +834,15 @@ void WebRtcIsacfix_GetLpcCoef(int16_t *inLoQ0,
/* bandwidth expansion */
for (n = 1; n <= ORDERLO; n++) {
- a_LOQ11[n] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(kPolyVecLo[n-1], a_LOQ11[n]);
+ a_LOQ11[n] = (int16_t) ((WEBRTC_SPL_MUL_16_16(
+ kPolyVecLo[n-1], a_LOQ11[n]) + ((int32_t) (1 << 14))) >> 15);
}
polyHI[0] = a_HIQ12[0];
for (n = 1; n <= ORDERHI; n++) {
- a_HIQ12[n] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(kPolyVecHi[n-1], a_HIQ12[n]);
+ a_HIQ12[n] = (int16_t) ((WEBRTC_SPL_MUL_16_16(
+ kPolyVecHi[n-1], a_HIQ12[n]) + ((int32_t) (1 << 14))) >> 15);
polyHI[n] = a_HIQ12[n];
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c
new file mode 100644
index 00000000000..8334f7eb18b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_mips.c
@@ -0,0 +1,133 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h"
+
+void WebRtcIsacfix_PitchFilterCore(int loopNumber,
+ int16_t gain,
+ int index,
+ int16_t sign,
+ int16_t* inputState,
+ int16_t* outputBuf2,
+ const int16_t* coefficient,
+ int16_t* inputBuf,
+ int16_t* outputBuf,
+ int* index2) {
+ int ind2t = *index2;
+ int i = 0;
+ int16_t* out2_pos2 = &outputBuf2[PITCH_BUFFSIZE - (index + 2)] + ind2t;
+ int32_t w1, w2, w3, w4, w5, gain32, sign32;
+ int32_t coef1, coef2, coef3, coef4, coef5 = 0;
+ // Define damp factors as int32_t (pair of int16_t)
+ int32_t kDampF0 = 0x0000F70A;
+ int32_t kDampF1 = 0x51EC2000;
+ int32_t kDampF2 = 0xF70A2000;
+ int16_t* input1 = inputBuf + ind2t;
+ int16_t* output1 = outputBuf + ind2t;
+ int16_t* output2 = outputBuf2 + ind2t + PITCH_BUFFSIZE;
+
+ // Load coefficients outside the loop and sign-extend gain and sign
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "lwl %[coef1], 3(%[coefficient]) \n\t"
+ "lwl %[coef2], 7(%[coefficient]) \n\t"
+ "lwl %[coef3], 11(%[coefficient]) \n\t"
+ "lwl %[coef4], 15(%[coefficient]) \n\t"
+ "lwr %[coef1], 0(%[coefficient]) \n\t"
+ "lwr %[coef2], 4(%[coefficient]) \n\t"
+ "lwr %[coef3], 8(%[coefficient]) \n\t"
+ "lwr %[coef4], 12(%[coefficient]) \n\t"
+ "lhu %[coef5], 16(%[coefficient]) \n\t"
+ "seh %[gain32], %[gain] \n\t"
+ "seh %[sign32], %[sign] \n\t"
+ ".set pop \n\t"
+ : [coef1] "=&r" (coef1), [coef2] "=&r" (coef2), [coef3] "=&r" (coef3),
+ [coef4] "=&r" (coef4), [coef5] "=&r" (coef5), [gain32] "=&r" (gain32),
+ [sign32] "=&r" (sign32)
+ : [coefficient] "r" (coefficient), [gain] "r" (gain),
+ [sign] "r" (sign)
+ : "memory"
+ );
+
+ for (i = 0; i < loopNumber; i++) {
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ // Filter to get fractional pitch
+ "li %[w1], 8192 \n\t"
+ "mtlo %[w1] \n\t"
+ "mthi $0 \n\t"
+ "lwl %[w1], 3(%[out2_pos2]) \n\t"
+ "lwl %[w2], 7(%[out2_pos2]) \n\t"
+ "lwl %[w3], 11(%[out2_pos2]) \n\t"
+ "lwl %[w4], 15(%[out2_pos2]) \n\t"
+ "lwr %[w1], 0(%[out2_pos2]) \n\t"
+ "lwr %[w2], 4(%[out2_pos2]) \n\t"
+ "lwr %[w3], 8(%[out2_pos2]) \n\t"
+ "lwr %[w4], 12(%[out2_pos2]) \n\t"
+ "lhu %[w5], 16(%[out2_pos2]) \n\t"
+ "dpa.w.ph $ac0, %[w1], %[coef1] \n\t"
+ "dpa.w.ph $ac0, %[w2], %[coef2] \n\t"
+ "dpa.w.ph $ac0, %[w3], %[coef3] \n\t"
+ "dpa.w.ph $ac0, %[w4], %[coef4] \n\t"
+ "dpa.w.ph $ac0, %[w5], %[coef5] \n\t"
+ "addiu %[out2_pos2], %[out2_pos2], 2 \n\t"
+ "mthi $0, $ac1 \n\t"
+ "lwl %[w2], 3(%[inputState]) \n\t"
+ "lwl %[w3], 7(%[inputState]) \n\t"
+ // Fractional pitch shift & saturation
+ "extr_s.h %[w1], $ac0, 14 \n\t"
+ "li %[w4], 16384 \n\t"
+ "lwr %[w2], 0(%[inputState]) \n\t"
+ "lwr %[w3], 4(%[inputState]) \n\t"
+ "mtlo %[w4], $ac1 \n\t"
+ // Shift low pass filter state
+ "swl %[w2], 5(%[inputState]) \n\t"
+ "swl %[w3], 9(%[inputState]) \n\t"
+ "mul %[w1], %[gain32], %[w1] \n\t"
+ "swr %[w2], 2(%[inputState]) \n\t"
+ "swr %[w3], 6(%[inputState]) \n\t"
+ // Low pass filter accumulation
+ "dpa.w.ph $ac1, %[kDampF1], %[w2] \n\t"
+ "dpa.w.ph $ac1, %[kDampF2], %[w3] \n\t"
+ "lh %[w4], 0(%[input1]) \n\t"
+ "addiu %[input1], %[input1], 2 \n\t"
+ "shra_r.w %[w1], %[w1], 12 \n\t"
+ "sh %[w1], 0(%[inputState]) \n\t"
+ "dpa.w.ph $ac1, %[kDampF0], %[w1] \n\t"
+ // Low pass filter shift & saturation
+ "extr_s.h %[w2], $ac1, 15 \n\t"
+ "mul %[w2], %[w2], %[sign32] \n\t"
+ // Buffer update
+ "subu %[w2], %[w4], %[w2] \n\t"
+ "shll_s.w %[w2], %[w2], 16 \n\t"
+ "sra %[w2], %[w2], 16 \n\t"
+ "sh %[w2], 0(%[output1]) \n\t"
+ "addu %[w2], %[w2], %[w4] \n\t"
+ "shll_s.w %[w2], %[w2], 16 \n\t"
+ "addiu %[output1], %[output1], 2 \n\t"
+ "sra %[w2], %[w2], 16 \n\t"
+ "sh %[w2], 0(%[output2]) \n\t"
+ "addiu %[output2], %[output2], 2 \n\t"
+ ".set pop \n\t"
+ : [w1] "=&r" (w1), [w2] "=&r" (w2), [w3] "=&r" (w3), [w4] "=&r" (w4),
+ [w5] "=&r" (w5), [input1] "+r" (input1), [out2_pos2] "+r" (out2_pos2),
+ [output1] "+r" (output1), [output2] "+r" (output2)
+ : [coefficient] "r" (coefficient), [inputState] "r" (inputState),
+ [gain32] "r" (gain32), [sign32] "r" (sign32), [kDampF0] "r" (kDampF0),
+ [kDampF1] "r" (kDampF1), [kDampF2] "r" (kDampF2),
+ [coef1] "r" (coef1), [coef2] "r" (coef2), [coef3] "r" (coef3),
+ [coef4] "r" (coef4), [coef5] "r" (coef5)
+ : "hi", "lo", "$ac1hi", "$ac1lo", "memory"
+ );
+ }
+ (*index2) += loopNumber;
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h
index 4d043566369..bd20ba0165a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h
@@ -26,13 +26,14 @@
/* Bitstream struct for decoder */
typedef struct Bitstreamstruct_dec {
- uint16_t *stream; /* Pointer to bytestream to decode */
+ uint16_t stream[STREAM_MAXW16_60MS]; /* Array bytestream to decode */
uint32_t W_upper; /* Upper boundary of interval W */
uint32_t streamval;
uint16_t stream_index; /* Index to the current position in bytestream */
int16_t full; /* 0 - first byte in memory filled, second empty*/
/* 1 - both bytes are empty (we just filled the previous memory */
+ int stream_size; /* The size of stream. */
} Bitstr_dec;
/* Bitstream struct for encoder */
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S
index 46682ac556a..6713b28695c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S
@@ -42,7 +42,11 @@ DEFINE_FUNCTION WebRtcIsacfix_Time2SpecNeon
add r5, sp, #(16 + FRAMESAMPLES * 2) @ tmpimQ16;
adr r9, WebRtcIsacfix_kCosTab1
+#if defined(__APPLE__)
+ mov r6, #:lower16:(WebRtcIsacfix_kSinTab1 - WebRtcIsacfix_kCosTab1)
+#else
mov r6, #(WebRtcIsacfix_kSinTab1 - WebRtcIsacfix_kCosTab1)
+#endif
add r10, r9, r6 @ WebRtcIsacfix_kSinTab1
vmov.u32 q14, #0 @ Initialize the maximum values for tmpInIm.
@@ -455,7 +459,12 @@ TransformAndFindMax:
bgt TransformAndFindMax
adr r10, WebRtcIsacfix_kSinTab1
+#if defined(__APPLE__)
+ mov r2, #:lower16:(WebRtcIsacfix_kSinTab1 - WebRtcIsacfix_kCosTab1)
+#else
mov r2, #(WebRtcIsacfix_kSinTab1 - WebRtcIsacfix_kCosTab1)
+#endif
+
sub r11, r10, r2 @ WebRtcIsacfix_kCosTab1
@ Find the maximum value in the Neon registers
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h
index f937b3453fa..76a61e6d33c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h
@@ -14,7 +14,7 @@
/*
* Define the fixed-point numeric formats
*/
-#include "typedefs.h"
+#include "webrtc/typedefs.h"
typedef struct WebRtcISACStruct ISACStruct;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
index 66bf06d472b..9ae69a0bbf2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
@@ -1449,13 +1449,18 @@ void WebRtcIsac_EncodeRc(int16_t* RCQ15, Bitstr* streamdata) {
/* quantize reflection coefficients (add noise feedback?) */
for (k = 0; k < AR_ORDER; k++) {
index[k] = WebRtcIsac_kQArRcInitIndex[k];
-
+ // The safeguards in the following while conditions are to suppress gcc
+ // 4.8.3 warnings, Issue 2888. Otherwise, the first and last elements of
+ // |WebRtcIsac_kQArBoundaryLevels| are such that the following search
+ // *never* causes an out-of-bounds read.
if (RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k]]) {
- while (RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k] + 1]) {
+ while (index[k] + 1 < NUM_AR_RC_QUANT_BAUNDARY &&
+ RCQ15[k] > WebRtcIsac_kQArBoundaryLevels[index[k] + 1]) {
index[k]++;
}
} else {
- while (RCQ15[k] < WebRtcIsac_kQArBoundaryLevels[--index[k]]) ;
+ while (index[k] > 0 &&
+ RCQ15[k] < WebRtcIsac_kQArBoundaryLevels[--index[k]]) ;
}
RCQ15[k] = *(WebRtcIsac_kQArRcLevelsPtr[k] + index[k]);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c
index f3f1650b42b..fa54a8d873c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c
@@ -15,20 +15,21 @@
*
*/
-#include "isac.h"
-#include "bandwidth_estimator.h"
-#include "crc.h"
-#include "entropy_coding.h"
-#include "codec.h"
-#include "structs.h"
-#include "signal_processing_library.h"
-#include "lpc_shape_swb16_tables.h"
-#include "os_specific_inline.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
+#include <math.h>
#include <stdio.h>
-#include <string.h>
#include <stdlib.h>
-#include <math.h>
+#include <string.h>
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/codec.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/crc.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/os_specific_inline.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/structs.h"
#define BIT_MASK_DEC_INIT 0x0001
#define BIT_MASK_ENC_INIT 0x0002
@@ -273,7 +274,7 @@ int16_t WebRtcIsac_Create(ISACStruct** ISAC_main_inst) {
ISACMainStruct* instISAC;
if (ISAC_main_inst != NULL) {
- instISAC = (ISACMainStruct*)WEBRTC_SPL_VNEW(ISACMainStruct, 1);
+ instISAC = (ISACMainStruct*)malloc(sizeof(ISACMainStruct));
*ISAC_main_inst = (ISACStruct*)instISAC;
if (*ISAC_main_inst != NULL) {
instISAC->errorCode = 0;
@@ -306,7 +307,7 @@ int16_t WebRtcIsac_Create(ISACStruct** ISAC_main_inst) {
*/
int16_t WebRtcIsac_Free(ISACStruct* ISAC_main_inst) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
- WEBRTC_SPL_FREE(instISAC);
+ free(instISAC);
return 0;
}
@@ -552,8 +553,8 @@ int16_t WebRtcIsac_Encode(ISACStruct* ISAC_main_inst,
}
if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) {
- WebRtcSpl_AnalysisQMF(speech_in_ptr, speechInLB, speechInUB,
- instISAC->analysisFBState1,
+ WebRtcSpl_AnalysisQMF(speech_in_ptr, SWBFRAMESAMPLES_10ms, speechInLB,
+ speechInUB, instISAC->analysisFBState1,
instISAC->analysisFBState2);
/* Convert from fixed to floating point. */
@@ -1314,7 +1315,7 @@ static int16_t Decode(ISACStruct* ISAC_main_inst,
speechIdx = 0;
while (speechIdx < numSamplesLB) {
WebRtcSpl_SynthesisQMF(&outFrameLB[speechIdx], &outFrameUB[speechIdx],
- &decoded[(speechIdx << 1)],
+ FRAMESAMPLES_10ms, &decoded[(speechIdx << 1)],
instISAC->synthesisFBState1,
instISAC->synthesisFBState2);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c
index 9eae0555f65..0f6d889225d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c
@@ -13,68 +13,69 @@
/********************* AR Coefficient Tables ************************/
/* cdf for quantized reflection coefficient 1 */
-const uint16_t WebRtcIsac_kQArRc1Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc1Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 129, 7707, 57485, 65495, 65527, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 2 */
-const uint16_t WebRtcIsac_kQArRc2Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc2Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 7, 531, 25298, 64525, 65526, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 3 */
-const uint16_t WebRtcIsac_kQArRc3Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc3Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 6, 620, 22898, 64843, 65527, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 4 */
-const uint16_t WebRtcIsac_kQArRc4Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc4Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 6, 35, 10034, 60733, 65506, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 5 */
-const uint16_t WebRtcIsac_kQArRc5Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc5Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 6, 36, 7567, 56727, 65385, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 6 */
-const uint16_t WebRtcIsac_kQArRc6Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc6Cdf[NUM_AR_RC_QUANT_BAUNDARY] = {
0, 2, 4, 6, 14, 6579, 57360, 65409, 65529, 65531,
65533, 65535};
/* representation levels for quantized reflection coefficient 1 */
-const int16_t WebRtcIsac_kQArRc1Levels[11] = {
+const int16_t WebRtcIsac_kQArRc1Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29007, -23202, -15496, -9279, -2577, 5934, 17535, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 2 */
-const int16_t WebRtcIsac_kQArRc2Levels[11] = {
+const int16_t WebRtcIsac_kQArRc2Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29503, -23494, -15261, -7309, -1399, 6158, 16381, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 3 */
-const int16_t WebRtcIsac_kQArRc3Levels[11] = {
+const int16_t WebRtcIsac_kQArRc3Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29503, -23157, -15186, -7347, -1359, 5829, 17535, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 4 */
-const int16_t WebRtcIsac_kQArRc4Levels[11] = {
+const int16_t WebRtcIsac_kQArRc4Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29503, -24512, -15362, -6665, -342, 6596, 14585, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 5 */
-const int16_t WebRtcIsac_kQArRc5Levels[11] = {
+const int16_t WebRtcIsac_kQArRc5Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29503, -24512, -15005, -6564, -106, 7123, 14920, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 6 */
-const int16_t WebRtcIsac_kQArRc6Levels[11] = {
+const int16_t WebRtcIsac_kQArRc6Levels[NUM_AR_RC_QUANT_BAUNDARY - 1] = {
-32104, -29503, -24512, -15096, -6656, -37, 7036, 14847, 24512, 29503, 32104
};
/* quantization boundary levels for reflection coefficients */
-const int16_t WebRtcIsac_kQArBoundaryLevels[12] = {
--32768, -31441, -27566, -21458, -13612, -4663, 4663, 13612, 21458, 27566, 31441, 32767
+const int16_t WebRtcIsac_kQArBoundaryLevels[NUM_AR_RC_QUANT_BAUNDARY] = {
+-32768, -31441, -27566, -21458, -13612, -4663, 4663, 13612, 21458, 27566, 31441,
+32767
};
/* initial index for AR reflection coefficient quantizer and cdf table search */
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h
index 22fe6a2102c..989cb367bff 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h
@@ -21,27 +21,29 @@
#include "structs.h"
+#define NUM_AR_RC_QUANT_BAUNDARY 12
+
/********************* AR Coefficient Tables ************************/
/* cdf for quantized reflection coefficient 1 */
-extern const uint16_t WebRtcIsac_kQArRc1Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc1Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* cdf for quantized reflection coefficient 2 */
-extern const uint16_t WebRtcIsac_kQArRc2Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc2Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* cdf for quantized reflection coefficient 3 */
-extern const uint16_t WebRtcIsac_kQArRc3Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc3Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* cdf for quantized reflection coefficient 4 */
-extern const uint16_t WebRtcIsac_kQArRc4Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc4Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* cdf for quantized reflection coefficient 5 */
-extern const uint16_t WebRtcIsac_kQArRc5Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc5Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* cdf for quantized reflection coefficient 6 */
-extern const uint16_t WebRtcIsac_kQArRc6Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc6Cdf[NUM_AR_RC_QUANT_BAUNDARY];
/* quantization boundary levels for reflection coefficients */
-extern const int16_t WebRtcIsac_kQArBoundaryLevels[12];
+extern const int16_t WebRtcIsac_kQArBoundaryLevels[NUM_AR_RC_QUANT_BAUNDARY];
/* initial indices for AR reflection coefficient quantizer and cdf table search */
extern const uint16_t WebRtcIsac_kQArRcInitIndex[AR_ORDER];
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h
index 1bd73e75bd0..62c890c84bc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h
@@ -18,10 +18,9 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_STRUCTS_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_STRUCTS_H_
-
-#include "typedefs.h"
-#include "settings.h"
-#include "isac.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
+#include "webrtc/modules/audio_coding/codecs/isac/main/source/settings.h"
+#include "webrtc/typedefs.h"
typedef struct Bitstreamstruct {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h
index 1370aff06a6..7998fdbdebf 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h
@@ -11,7 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INTERFACE_OPUS_INTERFACE_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_OPUS_INTERFACE_OPUS_INTERFACE_H_
-#include "typedefs.h"
+#include "webrtc/typedefs.h"
#ifdef __cplusplus
extern "C" {
@@ -59,6 +59,64 @@ int16_t WebRtcOpus_Encode(OpusEncInst* inst, int16_t* audio_in, int16_t samples,
*/
int16_t WebRtcOpus_SetBitRate(OpusEncInst* inst, int32_t rate);
+/****************************************************************************
+ * WebRtcOpus_SetPacketLossRate(...)
+ *
+ * This function configures the encoder's expected packet loss percentage.
+ *
+ * Input:
+ * - inst : Encoder context
+ * - loss_rate : loss percentage in the range 0-100, inclusive.
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_SetPacketLossRate(OpusEncInst* inst, int32_t loss_rate);
+
+/* TODO(minyue): Check whether an API to check the FEC and the packet loss rate
+ * is needed. It might not be very useful since there are not many use cases and
+ * the caller can always maintain the states. */
+
+/****************************************************************************
+ * WebRtcOpus_EnableFec()
+ *
+ * This function enables FEC for encoding.
+ *
+ * Input:
+ * - inst : Encoder context
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_EnableFec(OpusEncInst* inst);
+
+/****************************************************************************
+ * WebRtcOpus_DisableFec()
+ *
+ * This function disables FEC for encoding.
+ *
+ * Input:
+ * - inst : Encoder context
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_DisableFec(OpusEncInst* inst);
+
+/*
+ * WebRtcOpus_SetComplexity(...)
+ *
+ * This function adjusts the computational complexity. The effect is the same as
+ * calling the complexity setting of Opus as an Opus encoder related CTL.
+ *
+ * Input:
+ * - inst : Encoder context
+ * - complexity : New target complexity (0-10, inclusive)
+ *
+ * Return value : 0 - Success
+ * -1 - Error
+ */
+int16_t WebRtcOpus_SetComplexity(OpusEncInst* inst, int32_t complexity);
+
int16_t WebRtcOpus_DecoderCreate(OpusDecInst** inst, int channels);
int16_t WebRtcOpus_DecoderFree(OpusDecInst* inst);
@@ -113,6 +171,7 @@ int16_t WebRtcOpus_Decode(OpusDecInst* inst, const int16_t* encoded,
int16_t WebRtcOpus_DecodeSlave(OpusDecInst* inst, const int16_t* encoded,
int16_t encoded_bytes, int16_t* decoded,
int16_t* audio_type);
+
/****************************************************************************
* WebRtcOpus_DecodePlc(...)
* TODO(tlegrand): Remove master and slave functions when NetEq4 is in place.
@@ -138,6 +197,28 @@ int16_t WebRtcOpus_DecodePlcSlave(OpusDecInst* inst, int16_t* decoded,
int16_t number_of_lost_frames);
/****************************************************************************
+ * WebRtcOpus_DecodeFec(...)
+ *
+ * This function decodes the FEC data from an Opus packet into one or more audio
+ * frames at the ACM interface's sampling rate (32 kHz).
+ *
+ * Input:
+ * - inst : Decoder context
+ * - encoded : Encoded data
+ * - encoded_bytes : Bytes in encoded vector
+ *
+ * Output:
+ * - decoded : The decoded vector (previous frame)
+ *
+ * Return value : >0 - Samples per channel in decoded vector
+ * 0 - No FEC data in the packet
+ * -1 - Error
+ */
+int16_t WebRtcOpus_DecodeFec(OpusDecInst* inst, const uint8_t* encoded,
+ int16_t encoded_bytes, int16_t* decoded,
+ int16_t* audio_type);
+
+/****************************************************************************
* WebRtcOpus_DurationEst(...)
*
* This function calculates the duration of an opus packet.
@@ -152,6 +233,40 @@ int WebRtcOpus_DurationEst(OpusDecInst* inst,
const uint8_t* payload,
int payload_length_bytes);
+/* TODO(minyue): Check whether it is needed to add a decoder context to the
+ * arguments, like WebRtcOpus_DurationEst(...). In fact, the packet itself tells
+ * the duration. The decoder context in WebRtcOpus_DurationEst(...) is not used.
+ * So it may be advisable to remove it from WebRtcOpus_DurationEst(...). */
+
+/****************************************************************************
+ * WebRtcOpus_FecDurationEst(...)
+ *
+ * This function calculates the duration of the FEC data within an opus packet.
+ * Input:
+ * - payload : Encoded data pointer
+ * - payload_length_bytes : Bytes of encoded data
+ *
+ * Return value : >0 - The duration of the FEC data in the
+ * packet in samples.
+ * 0 - No FEC data in the packet.
+ */
+int WebRtcOpus_FecDurationEst(const uint8_t* payload,
+ int payload_length_bytes);
+
+/****************************************************************************
+ * WebRtcOpus_PacketHasFec(...)
+ *
+ * This function detects if an opus packet has FEC.
+ * Input:
+ * - payload : Encoded data pointer
+ * - payload_length_bytes : Bytes of encoded data
+ *
+ * Return value : 0 - the packet does NOT contain FEC.
+ * 1 - the packet contains FEC.
+ */
+int WebRtcOpus_PacketHasFec(const uint8_t* payload,
+ int payload_length_bytes);
+
#ifdef __cplusplus
} // extern "C"
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus.gypi b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus.gypi
index 406870232bb..b1dedd7d4a6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus.gypi
@@ -32,4 +32,26 @@
],
},
],
+ 'conditions': [
+ ['include_tests==1', {
+ 'targets': [
+ {
+ 'target_name': 'webrtc_opus_fec_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'webrtc_opus',
+ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ ],
+ 'include_dirs': [
+ '<(webrtc_root)',
+ ],
+ 'sources': [
+ 'opus_fec_test.cc',
+ ],
+ },
+ ],
+ }],
+ ],
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc
new file mode 100644
index 00000000000..fb4cb04f361
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc
@@ -0,0 +1,249 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+using ::std::string;
+using ::std::tr1::tuple;
+using ::std::tr1::make_tuple;
+using ::std::tr1::get;
+using ::testing::TestWithParam;
+using ::testing::ValuesIn;
+
+namespace webrtc {
+
+// Define coding parameter as <channels, bit_rate, filename, extension>.
+typedef tuple<int, int, string, string> coding_param;
+typedef struct mode mode;
+
+struct mode {
+ bool fec;
+ uint8_t target_packet_loss_rate;
+};
+
+const int kOpusBlockDurationMs = 20;
+const int kOpusInputSamplingKhz = 48;
+const int kOpusOutputSamplingKhz = 32;
+
+class OpusFecTest : public TestWithParam<coding_param> {
+ protected:
+ OpusFecTest();
+
+ virtual void SetUp();
+ virtual void TearDown();
+
+ virtual void EncodeABlock();
+
+ virtual void DecodeABlock(bool lost_previous, bool lost_current);
+
+ int block_duration_ms_;
+ int input_sampling_khz_;
+ int output_sampling_khz_;
+
+ // Number of samples-per-channel in a frame.
+ int input_length_sample_;
+
+ // Expected output number of samples-per-channel in a frame.
+ int output_length_sample_;
+
+ int channels_;
+ int bit_rate_;
+
+ size_t data_pointer_;
+ size_t loop_length_samples_;
+ int max_bytes_;
+ int encoded_bytes_;
+
+ WebRtcOpusEncInst* opus_encoder_;
+ WebRtcOpusDecInst* opus_decoder_;
+
+ string in_filename_;
+
+ scoped_ptr<int16_t[]> in_data_;
+ scoped_ptr<int16_t[]> out_data_;
+ scoped_ptr<uint8_t[]> bit_stream_;
+};
+
+void OpusFecTest::SetUp() {
+ channels_ = get<0>(GetParam());
+ bit_rate_ = get<1>(GetParam());
+ printf("Coding %d channel signal at %d bps.\n", channels_, bit_rate_);
+
+ in_filename_ = test::ResourcePath(get<2>(GetParam()), get<3>(GetParam()));
+
+ FILE* fp = fopen(in_filename_.c_str(), "rb");
+ ASSERT_FALSE(fp == NULL);
+
+ // Obtain file size.
+ fseek(fp, 0, SEEK_END);
+ loop_length_samples_ = ftell(fp) / sizeof(int16_t);
+ rewind(fp);
+
+ // Allocate memory to contain the whole file.
+ in_data_.reset(new int16_t[loop_length_samples_ +
+ input_length_sample_ * channels_]);
+
+ // Copy the file into the buffer.
+ ASSERT_EQ(fread(&in_data_[0], sizeof(int16_t), loop_length_samples_, fp),
+ loop_length_samples_);
+ fclose(fp);
+
+ // The audio will be used in a looped manner. To ease the acquisition of an
+ // audio frame that crosses the end of the excerpt, we add an extra block
+ // length of samples to the end of the array, starting over again from the
+ // beginning of the array. Audio frames that cross the end of the excerpt
+ // always appear as a contiguous block of memory.
+ memcpy(&in_data_[loop_length_samples_], &in_data_[0],
+ input_length_sample_ * channels_ * sizeof(int16_t));
+
+ // Maximum number of bytes in output bitstream.
+ max_bytes_ = input_length_sample_ * channels_ * sizeof(int16_t);
+
+ out_data_.reset(new int16_t[2 * output_length_sample_ * channels_]);
+ bit_stream_.reset(new uint8_t[max_bytes_]);
+
+ // Create encoder memory.
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_encoder_, channels_));
+ EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_decoder_, channels_));
+ // Set bitrate.
+ EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_encoder_, bit_rate_));
+}
+
+void OpusFecTest::TearDown() {
+ // Free memory.
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_decoder_));
+}
+
+OpusFecTest::OpusFecTest()
+ : block_duration_ms_(kOpusBlockDurationMs),
+ input_sampling_khz_(kOpusInputSamplingKhz),
+ output_sampling_khz_(kOpusOutputSamplingKhz),
+ input_length_sample_(block_duration_ms_ * input_sampling_khz_),
+ output_length_sample_(block_duration_ms_ * output_sampling_khz_),
+ data_pointer_(0),
+ max_bytes_(0),
+ encoded_bytes_(0),
+ opus_encoder_(NULL),
+ opus_decoder_(NULL) {
+}
+
+void OpusFecTest::EncodeABlock() {
+ int16_t value = WebRtcOpus_Encode(opus_encoder_,
+ &in_data_[data_pointer_],
+ input_length_sample_,
+ max_bytes_, &bit_stream_[0]);
+ EXPECT_GT(value, 0);
+
+ encoded_bytes_ = value;
+}
+
+void OpusFecTest::DecodeABlock(bool lost_previous, bool lost_current) {
+ int16_t audio_type;
+ int16_t value_1 = 0, value_2 = 0;
+
+ if (lost_previous) {
+ // Decode previous frame.
+ if (!lost_current &&
+ WebRtcOpus_PacketHasFec(&bit_stream_[0], encoded_bytes_) == 1) {
+ value_1 = WebRtcOpus_DecodeFec(opus_decoder_, &bit_stream_[0],
+ encoded_bytes_, &out_data_[0],
+ &audio_type);
+ } else {
+ value_1 = WebRtcOpus_DecodePlc(opus_decoder_, &out_data_[0], 1);
+ }
+ EXPECT_EQ(output_length_sample_, value_1);
+ }
+
+ if (!lost_current) {
+ // Decode current frame.
+ value_2 = WebRtcOpus_DecodeNew(opus_decoder_, &bit_stream_[0],
+ encoded_bytes_,
+ &out_data_[value_1 * channels_],
+ &audio_type);
+ EXPECT_EQ(output_length_sample_, value_2);
+ }
+}
+
+TEST_P(OpusFecTest, RandomPacketLossTest) {
+ const int kDurationMs = 200000;
+ int time_now_ms, fec_frames;
+ int actual_packet_loss_rate;
+ bool lost_current, lost_previous;
+ mode mode_set[3] = {{true, 0},
+ {false, 0},
+ {true, 50}};
+
+ lost_current = false;
+ for (int i = 0; i < 3; i++) {
+ if (mode_set[i].fec) {
+ EXPECT_EQ(0, WebRtcOpus_EnableFec(opus_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_SetPacketLossRate(opus_encoder_,
+ mode_set[i].target_packet_loss_rate));
+ printf("FEC is ON, target at packet loss rate %d percent.\n",
+ mode_set[i].target_packet_loss_rate);
+ } else {
+ EXPECT_EQ(0, WebRtcOpus_DisableFec(opus_encoder_));
+ printf("FEC is OFF.\n");
+ }
+ // In this test, we let the target packet loss rate match the actual rate.
+ actual_packet_loss_rate = mode_set[i].target_packet_loss_rate;
+ // Run every mode a certain time.
+ time_now_ms = 0;
+ fec_frames = 0;
+ while (time_now_ms < kDurationMs) {
+ // Encode & decode.
+ EncodeABlock();
+
+ // Check if payload has FEC.
+ int16_t fec = WebRtcOpus_PacketHasFec(&bit_stream_[0], encoded_bytes_);
+
+ // If FEC is disabled or the target packet loss rate is set to 0, there
+ // should be no FEC in the bit stream.
+ if (!mode_set[i].fec || mode_set[i].target_packet_loss_rate == 0) {
+ EXPECT_EQ(fec, 0);
+ } else if (fec == 1) {
+ fec_frames++;
+ }
+
+ lost_previous = lost_current;
+ lost_current = rand() < actual_packet_loss_rate * (RAND_MAX / 100);
+ DecodeABlock(lost_previous, lost_current);
+
+ time_now_ms += block_duration_ms_;
+
+ // |data_pointer_| is incremented and wrapped across
+ // |loop_length_samples_|.
+ data_pointer_ = (data_pointer_ + input_length_sample_ * channels_) %
+ loop_length_samples_;
+ }
+ if (mode_set[i].fec) {
+ printf("%.2f percent frames has FEC.\n",
+ static_cast<float>(fec_frames) * block_duration_ms_ / 2000);
+ }
+ }
+}
+
+const coding_param param_set[] =
+ {make_tuple(1, 64000, string("audio_coding/testfile32kHz"),
+ string("pcm")),
+ make_tuple(1, 32000, string("audio_coding/testfile32kHz"),
+ string("pcm")),
+ make_tuple(2, 64000, string("audio_coding/teststereo32kHz"),
+ string("pcm"))};
+
+// 64 kbps, stereo
+INSTANTIATE_TEST_CASE_P(AllTest, OpusFecTest,
+ ValuesIn(param_set));
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.c b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.c
index 98b924f219c..24fc4fc405a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.c
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_interface.c
@@ -103,7 +103,40 @@ int16_t WebRtcOpus_Encode(OpusEncInst* inst, int16_t* audio_in, int16_t samples,
int16_t WebRtcOpus_SetBitRate(OpusEncInst* inst, int32_t rate) {
if (inst) {
- return opus_encoder_ctl(inst->encoder, OPUS_SET_BITRATE(rate));
+ return opus_encoder_ctl(inst->encoder, OPUS_SET_BITRATE(rate));
+ } else {
+ return -1;
+ }
+}
+
+int16_t WebRtcOpus_SetPacketLossRate(OpusEncInst* inst, int32_t loss_rate) {
+ if (inst) {
+ return opus_encoder_ctl(inst->encoder,
+ OPUS_SET_PACKET_LOSS_PERC(loss_rate));
+ } else {
+ return -1;
+ }
+}
+
+int16_t WebRtcOpus_EnableFec(OpusEncInst* inst) {
+ if (inst) {
+ return opus_encoder_ctl(inst->encoder, OPUS_SET_INBAND_FEC(1));
+ } else {
+ return -1;
+ }
+}
+
+int16_t WebRtcOpus_DisableFec(OpusEncInst* inst) {
+ if (inst) {
+ return opus_encoder_ctl(inst->encoder, OPUS_SET_INBAND_FEC(0));
+ } else {
+ return -1;
+ }
+}
+
+int16_t WebRtcOpus_SetComplexity(OpusEncInst* inst, int32_t complexity) {
+ if (inst) {
+ return opus_encoder_ctl(inst->encoder, OPUS_SET_COMPLEXITY(complexity));
} else {
return -1;
}
@@ -217,6 +250,23 @@ static int DecodeNative(OpusDecoder* inst, const int16_t* encoded,
return -1;
}
+static int DecodeFec(OpusDecoder* inst, const int16_t* encoded,
+ int16_t encoded_bytes, int frame_size,
+ int16_t* decoded, int16_t* audio_type) {
+ unsigned char* coded = (unsigned char*) encoded;
+ opus_int16* audio = (opus_int16*) decoded;
+
+ int res = opus_decode(inst, coded, encoded_bytes, audio, frame_size, 1);
+
+ /* TODO(tlegrand): set to DTX for zero-length packets? */
+ *audio_type = 0;
+
+ if (res > 0) {
+ return res;
+ }
+ return -1;
+}
+
/* Resample from 48 to 32 kHz. Length of state is assumed to be
* kWebRtcOpusStateSize (7).
*/
@@ -542,6 +592,52 @@ int16_t WebRtcOpus_DecodePlcSlave(OpusDecInst* inst, int16_t* decoded,
return resampled_samples;
}
+int16_t WebRtcOpus_DecodeFec(OpusDecInst* inst, const uint8_t* encoded,
+ int16_t encoded_bytes, int16_t* decoded,
+ int16_t* audio_type) {
+ /* |buffer| is big enough for 120 ms (the largest Opus packet size) of stereo
+ * audio at 48 kHz. */
+ int16_t buffer[kWebRtcOpusMaxFrameSize];
+ int16_t* coded = (int16_t*)encoded;
+ int decoded_samples;
+ int resampled_samples;
+ int fec_samples;
+
+ if (WebRtcOpus_PacketHasFec(encoded, encoded_bytes) != 1) {
+ return 0;
+ }
+
+ fec_samples = opus_packet_get_samples_per_frame(encoded, 48000);
+
+ /* Decode to a temporary buffer. */
+ decoded_samples = DecodeFec(inst->decoder_left, coded, encoded_bytes,
+ fec_samples, buffer, audio_type);
+ if (decoded_samples < 0) {
+ return -1;
+ }
+
+ /* If mono case, just do a regular call to the decoder.
+ * If stereo, we need to de-interleave the stereo output into blocks with
+ * left and right channel. Each block is resampled to 32 kHz, and then
+ * interleaved again. */
+ if (inst->channels == 2) {
+ /* De-interleave and resample. */
+ resampled_samples = WebRtcOpus_DeInterleaveResample(inst,
+ buffer,
+ decoded_samples,
+ decoded);
+ } else {
+ /* Resample from 48 kHz to 32 kHz. Filter state memory for left channel is
+ * used for mono signals. */
+ resampled_samples = WebRtcOpus_Resample48to32(buffer,
+ decoded_samples,
+ inst->state_48_32_left,
+ decoded);
+ }
+
+ return resampled_samples;
+}
+
int WebRtcOpus_DurationEst(OpusDecInst* inst,
const uint8_t* payload,
int payload_length_bytes) {
@@ -562,3 +658,79 @@ int WebRtcOpus_DurationEst(OpusDecInst* inst,
samples = samples * 2 / 3;
return samples;
}
+
+int WebRtcOpus_FecDurationEst(const uint8_t* payload,
+ int payload_length_bytes) {
+ int samples;
+ if (WebRtcOpus_PacketHasFec(payload, payload_length_bytes) != 1) {
+ return 0;
+ }
+
+ samples = opus_packet_get_samples_per_frame(payload, 48000);
+ if (samples < 480 || samples > 5760) {
+ /* Invalid payload duration. */
+ return 0;
+ }
+ /* Compensate for the down-sampling from 48 kHz to 32 kHz.
+ * This should be removed when the resampling in WebRtcOpus_Decode is
+ * removed. */
+ samples = samples * 2 / 3;
+ return samples;
+}
+
+int WebRtcOpus_PacketHasFec(const uint8_t* payload,
+ int payload_length_bytes) {
+ int frames, channels, payload_length_ms;
+ int n;
+ opus_int16 frame_sizes[48];
+ const unsigned char *frame_data[48];
+
+ if (payload == NULL || payload_length_bytes <= 0)
+ return 0;
+
+ /* In CELT_ONLY mode, packets should not have FEC. */
+ if (payload[0] & 0x80)
+ return 0;
+
+ payload_length_ms = opus_packet_get_samples_per_frame(payload, 48000) / 48;
+ if (10 > payload_length_ms)
+ payload_length_ms = 10;
+
+ channels = opus_packet_get_nb_channels(payload);
+
+ switch (payload_length_ms) {
+ case 10:
+ case 20: {
+ frames = 1;
+ break;
+ }
+ case 40: {
+ frames = 2;
+ break;
+ }
+ case 60: {
+ frames = 3;
+ break;
+ }
+ default: {
+ return 0; // Any other duration means the packet is invalid.
+ }
+ }
+
+ /* The following is to parse the LBRR flags. */
+ if (opus_packet_parse(payload, payload_length_bytes, NULL, frame_data,
+ frame_sizes, NULL) < 0) {
+ return 0;
+ }
+
+ if (frame_sizes[0] <= 1) {
+ return 0;
+ }
+
+ for (n = 0; n < channels; n++) {
+ if (frame_data[0][0] & (0x80 >> ((n + 1) * (frames + 1) - 1)))
+ return 1;
+ }
+
+ return 0;
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
new file mode 100644
index 00000000000..16099c6d93a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
+#include "webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h"
+
+using ::std::string;
+using ::std::tr1::make_tuple;
+using ::testing::ValuesIn;
+
+namespace webrtc {
+
+static const int kOpusBlockDurationMs = 20;
+static const int kOpusInputSamplingKhz = 48;
+static const int kOpustOutputSamplingKhz = 32;
+
+class OpusSpeedTest : public AudioCodecSpeedTest {
+ protected:
+ OpusSpeedTest();
+ virtual void SetUp() OVERRIDE;
+ virtual void TearDown() OVERRIDE;
+ virtual float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
+ int max_bytes, int* encoded_bytes);
+ virtual float DecodeABlock(const uint8_t* bit_stream, int encoded_bytes,
+ int16_t* out_data);
+ WebRtcOpusEncInst* opus_encoder_;
+ WebRtcOpusDecInst* opus_decoder_;
+};
+
+OpusSpeedTest::OpusSpeedTest()
+ : AudioCodecSpeedTest(kOpusBlockDurationMs,
+ kOpusInputSamplingKhz,
+ kOpustOutputSamplingKhz),
+ opus_encoder_(NULL),
+ opus_decoder_(NULL) {
+}
+
+void OpusSpeedTest::SetUp() {
+ AudioCodecSpeedTest::SetUp();
+ /* Create encoder memory. */
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_encoder_, channels_));
+ EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_decoder_, channels_));
+ /* Set bitrate. */
+ EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_encoder_, bit_rate_));
+}
+
+void OpusSpeedTest::TearDown() {
+ AudioCodecSpeedTest::TearDown();
+ /* Free memory. */
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_decoder_));
+}
+
+float OpusSpeedTest::EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
+ int max_bytes, int* encoded_bytes) {
+ clock_t clocks = clock();
+ int value = WebRtcOpus_Encode(opus_encoder_, in_data,
+ input_length_sample_, max_bytes,
+ bit_stream);
+ clocks = clock() - clocks;
+ EXPECT_GT(value, 0);
+ *encoded_bytes = value;
+ return 1000.0 * clocks / CLOCKS_PER_SEC;
+}
+
+float OpusSpeedTest::DecodeABlock(const uint8_t* bit_stream,
+ int encoded_bytes, int16_t* out_data) {
+ int value;
+ int16_t audio_type;
+ clock_t clocks = clock();
+ value = WebRtcOpus_DecodeNew(opus_decoder_, bit_stream, encoded_bytes,
+ out_data, &audio_type);
+ clocks = clock() - clocks;
+ EXPECT_EQ(output_length_sample_, value);
+ return 1000.0 * clocks / CLOCKS_PER_SEC;
+}
+
+#define ADD_TEST(complexity) \
+TEST_P(OpusSpeedTest, OpusSetComplexityTest##complexity) { \
+ /* Test audio length in seconds. */ \
+ size_t kDurationSec = 400; \
+ /* Set complexity. */ \
+ printf("Setting complexity to %d ...\n", complexity); \
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_encoder_, complexity)); \
+ EncodeDecode(kDurationSec); \
+}
+
+ADD_TEST(10);
+ADD_TEST(9);
+ADD_TEST(8);
+ADD_TEST(7);
+ADD_TEST(6);
+ADD_TEST(5);
+ADD_TEST(4);
+ADD_TEST(3);
+ADD_TEST(2);
+ADD_TEST(1);
+ADD_TEST(0);
+
+// List all test cases: (channel, bit rate, filename, extension, if save output).
+const coding_param param_set[] =
+ {make_tuple(1, 64000, string("audio_coding/speech_mono_32_48kHz"),
+ string("pcm"), true),
+ make_tuple(1, 32000, string("audio_coding/speech_mono_32_48kHz"),
+ string("pcm"), true),
+ make_tuple(2, 64000, string("audio_coding/music_stereo_48kHz"),
+ string("pcm"), true)};
+
+INSTANTIATE_TEST_CASE_P(AllTest, OpusSpeedTest,
+ ValuesIn(param_set));
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
index b699cf9df5e..ed876cd1050 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/opus/opus_unittest.cc
@@ -202,6 +202,27 @@ TEST_F(OpusTest, OpusSetBitRate) {
EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_stereo_encoder_));
}
+TEST_F(OpusTest, OpusSetComplexity) {
+ // Test without creating encoder memory.
+ EXPECT_EQ(-1, WebRtcOpus_SetComplexity(opus_mono_encoder_, 9));
+ EXPECT_EQ(-1, WebRtcOpus_SetComplexity(opus_stereo_encoder_, 9));
+
+ // Create encoder memory, try with different complexities.
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_mono_encoder_, 1));
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_stereo_encoder_, 2));
+
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_mono_encoder_, 0));
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_stereo_encoder_, 0));
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_mono_encoder_, 10));
+ EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_stereo_encoder_, 10));
+ EXPECT_EQ(-1, WebRtcOpus_SetComplexity(opus_mono_encoder_, 11));
+ EXPECT_EQ(-1, WebRtcOpus_SetComplexity(opus_stereo_encoder_, 11));
+
+ // Free memory.
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_mono_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_stereo_encoder_));
+}
+
// Encode and decode one frame (stereo), initialize the decoder and
// decode once more.
TEST_F(OpusTest, OpusDecodeInit) {
@@ -265,6 +286,47 @@ TEST_F(OpusTest, OpusDecodeInit) {
EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_stereo_decoder_new_));
}
+TEST_F(OpusTest, OpusEnableDisableFec) {
+ // Test without creating encoder memory.
+ EXPECT_EQ(-1, WebRtcOpus_EnableFec(opus_mono_encoder_));
+ EXPECT_EQ(-1, WebRtcOpus_DisableFec(opus_stereo_encoder_));
+
+ // Create encoder memory, try with different bitrates.
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_mono_encoder_, 1));
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_stereo_encoder_, 2));
+
+ EXPECT_EQ(0, WebRtcOpus_EnableFec(opus_mono_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_EnableFec(opus_stereo_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_DisableFec(opus_mono_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_DisableFec(opus_stereo_encoder_));
+
+ // Free memory.
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_mono_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_stereo_encoder_));
+}
+
+TEST_F(OpusTest, OpusSetPacketLossRate) {
+ // Test without creating encoder memory.
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_mono_encoder_, 50));
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_stereo_encoder_, 50));
+
+ // Create encoder memory, try with different bitrates.
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_mono_encoder_, 1));
+ EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_stereo_encoder_, 2));
+
+ EXPECT_EQ(0, WebRtcOpus_SetPacketLossRate(opus_mono_encoder_, 50));
+ EXPECT_EQ(0, WebRtcOpus_SetPacketLossRate(opus_stereo_encoder_, 50));
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_mono_encoder_, -1));
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_stereo_encoder_, -1));
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_mono_encoder_, 101));
+ EXPECT_EQ(-1, WebRtcOpus_SetPacketLossRate(opus_stereo_encoder_, 101));
+
+ // Free memory.
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_mono_encoder_));
+ EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_stereo_encoder_));
+}
+
+
// PLC in mono mode.
TEST_F(OpusTest, OpusDecodePlcMono) {
// Create encoder memory.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/pcm16b/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/OWNERS
new file mode 100644
index 00000000000..bbffda7e492
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/OWNERS
@@ -0,0 +1,6 @@
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc
new file mode 100644
index 00000000000..c7cafdff9ba
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc
@@ -0,0 +1,124 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+using ::std::tr1::get;
+
+namespace webrtc {
+
+AudioCodecSpeedTest::AudioCodecSpeedTest(int block_duration_ms,
+ int input_sampling_khz,
+ int output_sampling_khz)
+ : block_duration_ms_(block_duration_ms),
+ input_sampling_khz_(input_sampling_khz),
+ output_sampling_khz_(output_sampling_khz),
+ input_length_sample_(block_duration_ms_ * input_sampling_khz_),
+ output_length_sample_(block_duration_ms_ * output_sampling_khz_),
+ data_pointer_(0),
+ loop_length_samples_(0),
+ max_bytes_(0),
+ encoded_bytes_(0),
+ encoding_time_ms_(0.0),
+ decoding_time_ms_(0.0),
+ out_file_(NULL) {
+}
+
+void AudioCodecSpeedTest::SetUp() {
+ channels_ = get<0>(GetParam());
+ bit_rate_ = get<1>(GetParam());
+ in_filename_ = test::ResourcePath(get<2>(GetParam()), get<3>(GetParam()));
+ save_out_data_ = get<4>(GetParam());
+
+ FILE* fp = fopen(in_filename_.c_str(), "rb");
+ assert(fp != NULL);
+
+ // Obtain file size.
+ fseek(fp, 0, SEEK_END);
+ loop_length_samples_ = ftell(fp) / sizeof(int16_t);
+ rewind(fp);
+
+ // Allocate memory to contain the whole file.
+ in_data_.reset(new int16_t[loop_length_samples_ +
+ input_length_sample_ * channels_]);
+
+ data_pointer_ = 0;
+
+ // Copy the file into the buffer.
+ ASSERT_EQ(fread(&in_data_[0], sizeof(int16_t), loop_length_samples_, fp),
+ loop_length_samples_);
+ fclose(fp);
+
+ // Add an extra block length of samples to the end of the array, starting
+ // over again from the beginning of the array. This is done to simplify
+ // the reading process when reading over the end of the loop.
+ memcpy(&in_data_[loop_length_samples_], &in_data_[0],
+ input_length_sample_ * channels_ * sizeof(int16_t));
+
+ max_bytes_ = input_length_sample_ * channels_ * sizeof(int16_t);
+ out_data_.reset(new int16_t[output_length_sample_ * channels_]);
+ bit_stream_.reset(new uint8_t[max_bytes_]);
+
+ if (save_out_data_) {
+ std::string out_filename =
+ ::testing::UnitTest::GetInstance()->current_test_info()->name();
+
+ // Erase '/'
+ size_t found;
+ while ((found = out_filename.find('/')) != std::string::npos)
+ out_filename.replace(found, 1, "_");
+
+ out_filename = test::OutputPath() + out_filename + ".pcm";
+
+ out_file_ = fopen(out_filename.c_str(), "wb");
+ assert(out_file_ != NULL);
+
+ printf("Output to be saved in %s.\n", out_filename.c_str());
+ }
+}
+
+void AudioCodecSpeedTest::TearDown() {
+ if (save_out_data_) {
+ fclose(out_file_);
+ }
+}
+
+void AudioCodecSpeedTest::EncodeDecode(size_t audio_duration_sec) {
+ size_t time_now_ms = 0;
+ float time_ms;
+
+ printf("Coding %d kHz-sampled %d-channel audio at %d bps ...\n",
+ input_sampling_khz_, channels_, bit_rate_);
+
+ while (time_now_ms < audio_duration_sec * 1000) {
+ // Encode & decode.
+ time_ms = EncodeABlock(&in_data_[data_pointer_], &bit_stream_[0],
+ max_bytes_, &encoded_bytes_);
+ encoding_time_ms_ += time_ms;
+ time_ms = DecodeABlock(&bit_stream_[0], encoded_bytes_, &out_data_[0]);
+ decoding_time_ms_ += time_ms;
+ if (save_out_data_) {
+ fwrite(&out_data_[0], sizeof(int16_t),
+ output_length_sample_ * channels_, out_file_);
+ }
+ data_pointer_ = (data_pointer_ + input_length_sample_ * channels_) %
+ loop_length_samples_;
+ time_now_ms += block_duration_ms_;
+ }
+
+ printf("Encoding: %.2f%% real time,\nDecoding: %.2f%% real time.\n",
+ (encoding_time_ms_ / audio_duration_sec) / 10.0,
+ (decoding_time_ms_ / audio_duration_sec) / 10.0);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h
new file mode 100644
index 00000000000..2c9b45e4f86
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_
+
+#include <string>
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Define coding parameter as
+// <channels, bit_rate, file_name, extension, if_save_output>.
+typedef std::tr1::tuple<int, int, std::string, std::string, bool> coding_param;
+
+class AudioCodecSpeedTest : public testing::TestWithParam<coding_param> {
+ protected:
+ AudioCodecSpeedTest(int block_duration_ms,
+ int input_sampling_khz,
+ int output_sampling_khz);
+ virtual void SetUp();
+ virtual void TearDown();
+
+ // EncodeABlock(...) does the following:
+ // 1. encodes a block of audio, saved in |in_data|,
+ // 2. saves the bit stream to |bit_stream| of |max_bytes| bytes in size,
+ // 3. assigns |encoded_bytes| the length of the bit stream (in bytes),
+ // 4. returns the time (in milliseconds) spent on actual encoding.
+ virtual float EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
+ int max_bytes, int* encoded_bytes) = 0;
+
+ // DecodeABlock(...) does the following:
+ // 1. decodes the bit stream in |bit_stream| with a length of |encoded_bytes|
+ // (in bytes),
+ // 2. saves the decoded audio in |out_data|,
+ // 3. returns the time (in milliseconds) spent on actual decoding.
+ virtual float DecodeABlock(const uint8_t* bit_stream, int encoded_bytes,
+ int16_t* out_data) = 0;
+
+ // Encode and decode audio of |audio_duration| (in seconds) and
+ // record the runtimes for encoding and decoding separately.
+ void EncodeDecode(size_t audio_duration);
+
+ int block_duration_ms_;
+ int input_sampling_khz_;
+ int output_sampling_khz_;
+
+ // Number of samples-per-channel in a frame.
+ int input_length_sample_;
+
+ // Expected output number of samples-per-channel in a frame.
+ int output_length_sample_;
+
+ scoped_ptr<int16_t[]> in_data_;
+ scoped_ptr<int16_t[]> out_data_;
+ size_t data_pointer_;
+ size_t loop_length_samples_;
+ scoped_ptr<uint8_t[]> bit_stream_;
+
+ // Maximum number of bytes in output bitstream for a frame of audio.
+ int max_bytes_;
+
+ int encoded_bytes_;
+ float encoding_time_ms_;
+ float decoding_time_ms_;
+ FILE* out_file_;
+
+ int channels_;
+
+ // Bit rate is in bit-per-second.
+ int bit_rate_;
+
+ std::string in_filename_;
+
+ // Determines whether to save the output to file.
+ bool save_out_data_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.gypi b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.gypi
new file mode 100644
index 00000000000..4d675e10cfa
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.gypi
@@ -0,0 +1,71 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'audio_codec_speed_tests',
+ 'type': '<(gtest_target_type)',
+ 'dependencies': [
+ 'audio_processing',
+ 'iSACFix',
+ 'webrtc_opus',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ ],
+ 'sources': [
+ 'audio_codec_speed_test.h',
+ 'audio_codec_speed_test.cc',
+ '<(webrtc_root)/modules/audio_coding/codecs/opus/opus_speed_test.cc',
+ '<(webrtc_root)/modules/audio_coding/codecs/isac/fix/test/isac_speed_test.cc',
+ ],
+ 'conditions': [
+ # TODO(henrike): remove build_with_chromium==1 when the bots are
+ # using Chromium's buildbots.
+ ['build_with_chromium==1 and OS=="android"', {
+ 'dependencies': [
+ '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+ ],
+ }],
+ ],
+ }],
+ 'conditions': [
+ # TODO(henrike): remove build_with_chromium==1 when the bots are using
+ # Chromium's buildbots.
+ ['build_with_chromium==1 and OS=="android"', {
+ 'targets': [
+ {
+ 'target_name': 'audio_codec_speed_tests_apk_target',
+ 'type': 'none',
+ 'dependencies': [
+ '<(apk_tests_path):audio_codec_speed_tests_apk',
+ ],
+ },
+ ],
+ }],
+ ['test_isolation_mode != "noop"', {
+ 'targets': [
+ {
+ 'target_name': 'audio_codec_speed_tests_run',
+ 'type': 'none',
+ 'dependencies': [
+ 'audio_codec_speed_tests',
+ ],
+ 'includes': [
+ '../../../../build/isolate.gypi',
+ 'audio_codec_speed_tests.isolate',
+ ],
+ 'sources': [
+ 'audio_codec_speed_tests.isolate',
+ ],
+ },
+ ],
+ }],
+ ],
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.isolate b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.isolate
new file mode 100644
index 00000000000..8c5a2bd0ec0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_tests.isolate
@@ -0,0 +1,40 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'conditions': [
+ ['OS=="android"', {
+ 'variables': {
+ 'isolate_dependency_untracked': [
+ '<(DEPTH)/resources/',
+ '<(DEPTH)/data/',
+ ],
+ },
+ }],
+ ['OS=="linux" or OS=="mac" or OS=="win"', {
+ 'variables': {
+ 'command': [
+ '<(DEPTH)/testing/test_env.py',
+ '<(PRODUCT_DIR)/audio_codec_speed_tests<(EXECUTABLE_SUFFIX)',
+ ],
+ 'isolate_dependency_touched': [
+ '<(DEPTH)/DEPS',
+ ],
+ 'isolate_dependency_tracked': [
+ '<(DEPTH)/resources/audio_coding/music_stereo_48kHz.pcm',
+ '<(DEPTH)/resources/audio_coding/speech_mono_16kHz.pcm',
+ '<(DEPTH)/resources/audio_coding/speech_mono_32_48kHz.pcm',
+ '<(DEPTH)/testing/test_env.py',
+ '<(PRODUCT_DIR)/audio_codec_speed_tests<(EXECUTABLE_SUFFIX)',
+ ],
+ 'isolate_dependency_untracked': [
+ '<(DEPTH)/tools/swarming_client/',
+ ],
+ },
+ }],
+ ],
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/main/OWNERS
index e1e6256ca48..83880d21dc8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/OWNERS
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/OWNERS
@@ -1,3 +1,4 @@
tina.legrand@webrtc.org
turaj@webrtc.org
jan.skoglund@webrtc.org
+henrik.lundin@webrtc.org
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc
index fd30a137ae0..e55b6c4660f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.cc
@@ -20,7 +20,7 @@
#include <assert.h>
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
#include "webrtc/system_wrappers/interface/trace.h"
// Includes needed to create the codecs.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h
index 98869efeead..65be793e37b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_codec_database.h
@@ -18,7 +18,7 @@
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_g722.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_g722.h
index 7216a574af8..6197a9d93ad 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_g722.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_g722.h
@@ -12,6 +12,7 @@
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G722_H_
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
typedef struct WebRtcG722EncInst G722EncInst;
typedef struct WebRtcG722DecInst G722DecInst;
@@ -40,7 +41,8 @@ class ACMG722 : public ACMGenericCodec {
int32_t Add10MsDataSafe(const uint32_t timestamp,
const int16_t* data,
const uint16_t length_smpl,
- const uint8_t audio_channel);
+ const uint8_t audio_channel)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
void DestructEncoderSafe();
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc
index aa8e8be0637..a4808c0e384 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.cc
@@ -26,7 +26,7 @@ namespace acm2 {
// Enum for CNG
enum {
kMaxPLCParamsCNG = WEBRTC_CNG_MAX_LPC_ORDER,
- kNewCNGNumPLCParams = 8
+ kNewCNGNumLPCParams = 8
};
// Interval for sending new CNG parameters (SID frames) is 100 msec.
@@ -56,9 +56,10 @@ ACMGenericCodec::ACMGenericCodec()
vad_mode_(VADNormal),
dtx_enabled_(false),
ptr_dtx_inst_(NULL),
- num_lpc_params_(kNewCNGNumPLCParams),
+ num_lpc_params_(kNewCNGNumLPCParams),
sent_cn_previous_(false),
prev_frame_cng_(0),
+ has_internal_fec_(false),
neteq_decode_lock_(NULL),
codec_wrapper_lock_(*RWLockWrapper::CreateRWLock()),
last_timestamp_(0xD87F3F9F),
@@ -546,7 +547,7 @@ void ACMGenericCodec::DestructEncoder() {
WebRtcCng_FreeEnc(ptr_dtx_inst_);
ptr_dtx_inst_ = NULL;
}
- num_lpc_params_ = kNewCNGNumPLCParams;
+ num_lpc_params_ = kNewCNGNumLPCParams;
DestructEncoderSafe();
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h
index d41580fff54..fa21ca015aa 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h
@@ -13,9 +13,10 @@
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/system_wrappers/interface/trace.h"
#define MAX_FRAME_SIZE_10MSEC 6
@@ -560,6 +561,46 @@ class ACMGenericCodec {
//
virtual AudioDecoder* Decoder(int /* codec_id */) { return NULL; }
+ ///////////////////////////////////////////////////////////////////////////
+ // bool HasInternalFEC()
+ // Used to check if the codec has internal FEC.
+ //
+ // Return value:
+ // true if the codec has an internal FEC, e.g. Opus.
+ // false otherwise.
+ //
+ bool HasInternalFEC() const { return has_internal_fec_; }
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int SetFEC();
+ // Sets the codec internal FEC. No effects on codecs that do not provide
+ // internal FEC.
+ //
+ // Input:
+ // -enable_fec : if true FEC will be enabled otherwise the FEC is
+ // disabled.
+ //
+ // Return value:
+ // -1 if failed, or the codec does not support FEC
+ // 0 if succeeded.
+ //
+ virtual int SetFEC(bool /* enable_fec */) { return -1; }
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int SetPacketLossRate()
+ // Sets the expected packet loss rate for encoding. Some encoders provide
+ // packet-loss-aware encoding to make the stream less sensitive to packet
+ // losses, through e.g., FEC. No effect on codecs lacking such encoding.
+ //
+ // Input:
+ // -loss_rate : expected packet loss rate (0 -- 100 inclusive).
+ //
+ // Return value:
+ // -1 if failed,
+ // 0 if succeeded or packet loss rate is ignored.
+ //
+ virtual int SetPacketLossRate(int /* loss_rate */) { return 0; }
+
protected:
///////////////////////////////////////////////////////////////////////////
// All the functions with FunctionNameSafe(...) contain the actual
@@ -576,7 +617,8 @@ class ACMGenericCodec {
virtual int32_t Add10MsDataSafe(const uint32_t timestamp,
const int16_t* data,
const uint16_t length,
- const uint8_t audio_channel);
+ const uint8_t audio_channel)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// See EncoderParam() for the description of function, input(s)/output(s)
@@ -588,14 +630,15 @@ class ACMGenericCodec {
// See ResetEncoder() for the description of function, input(s)/output(s)
// and return value.
//
- int16_t ResetEncoderSafe();
+ int16_t ResetEncoderSafe() EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// See InitEncoder() for the description of function, input(s)/output(s)
// and return value.
//
int16_t InitEncoderSafe(WebRtcACMCodecParams* codec_params,
- bool force_initialization);
+ bool force_initialization)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// See InitDecoder() for the description of function, input(s)/output(s)
@@ -641,7 +684,8 @@ class ACMGenericCodec {
// See SetVAD() for the description of function, input(s)/output(s) and
// return value.
//
- int16_t SetVADSafe(bool* enable_dtx, bool* enable_vad, ACMVADMode* mode);
+ int16_t SetVADSafe(bool* enable_dtx, bool* enable_vad, ACMVADMode* mode)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// See ReplaceInternalDTX() for the description of function, input and
@@ -678,7 +722,8 @@ class ACMGenericCodec {
// -1 if failed,
// 0 if succeeded.
//
- int16_t EnableVAD(ACMVADMode mode);
+ int16_t EnableVAD(ACMVADMode mode)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// int16_t DisableVAD()
@@ -688,7 +733,7 @@ class ACMGenericCodec {
// -1 if failed,
// 0 if succeeded.
//
- int16_t DisableVAD();
+ int16_t DisableVAD() EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// int16_t EnableDTX()
@@ -699,7 +744,7 @@ class ACMGenericCodec {
// -1 if failed,
// 0 if succeeded.
//
- virtual int16_t EnableDTX();
+ virtual int16_t EnableDTX() EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// int16_t DisableDTX()
@@ -710,7 +755,7 @@ class ACMGenericCodec {
// -1 if failed,
// 0 if succeeded.
//
- virtual int16_t DisableDTX();
+ virtual int16_t DisableDTX() EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// int16_t InternalEncode()
@@ -838,7 +883,8 @@ class ACMGenericCodec {
//
int16_t ProcessFrameVADDTX(uint8_t* bitstream,
int16_t* bitstream_len_byte,
- int16_t* samples_processed);
+ int16_t* samples_processed)
+ EXCLUSIVE_LOCKS_REQUIRED(codec_wrapper_lock_);
///////////////////////////////////////////////////////////////////////////
// CurrentRate()
@@ -885,19 +931,23 @@ class ACMGenericCodec {
// True if the encoder instance initialized
bool encoder_initialized_;
- bool registered_in_neteq_;
+ const bool registered_in_neteq_; // TODO(henrik.lundin) Remove?
// VAD/DTX
bool has_internal_dtx_;
- WebRtcVadInst* ptr_vad_inst_;
- bool vad_enabled_;
- ACMVADMode vad_mode_;
- int16_t vad_label_[MAX_FRAME_SIZE_10MSEC];
- bool dtx_enabled_;
- WebRtcCngEncInst* ptr_dtx_inst_;
- uint8_t num_lpc_params_;
- bool sent_cn_previous_;
- int16_t prev_frame_cng_;
+ WebRtcVadInst* ptr_vad_inst_ GUARDED_BY(codec_wrapper_lock_);
+ bool vad_enabled_ GUARDED_BY(codec_wrapper_lock_);
+ ACMVADMode vad_mode_ GUARDED_BY(codec_wrapper_lock_);
+ int16_t vad_label_[MAX_FRAME_SIZE_10MSEC] GUARDED_BY(codec_wrapper_lock_);
+ bool dtx_enabled_ GUARDED_BY(codec_wrapper_lock_);
+ WebRtcCngEncInst* ptr_dtx_inst_ GUARDED_BY(codec_wrapper_lock_);
+ uint8_t num_lpc_params_ // TODO(henrik.lundin) Delete and
+ GUARDED_BY(codec_wrapper_lock_); // replace with kNewCNGNumLPCParams.
+ bool sent_cn_previous_ GUARDED_BY(codec_wrapper_lock_);
+ int16_t prev_frame_cng_ GUARDED_BY(codec_wrapper_lock_);
+
+ // FEC.
+ bool has_internal_fec_;
WebRtcACMCodecParams encoder_params_;
@@ -909,7 +959,7 @@ class ACMGenericCodec {
// such as buffers and state variables.
RWLockWrapper& codec_wrapper_lock_;
- uint32_t last_timestamp_;
+ uint32_t last_timestamp_ GUARDED_BY(codec_wrapper_lock_);
uint32_t unique_id_;
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.cc
index e27284212fd..9fbcdd4cd8b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.cc
@@ -14,7 +14,8 @@
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#ifdef WEBRTC_CODEC_ISAC
@@ -59,14 +60,15 @@ static const int32_t kIsacRatesSwb[NR_ISAC_BANDWIDTHS] = {
#if (!defined(WEBRTC_CODEC_ISAC) && !defined(WEBRTC_CODEC_ISACFX))
ACMISAC::ACMISAC(int16_t /* codec_id */)
- : codec_inst_ptr_(NULL),
+ : codec_inst_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ codec_inst_ptr_(NULL),
is_enc_initialized_(false),
isac_coding_mode_(CHANNEL_INDEPENDENT),
enforce_frame_size_(false),
isac_currentBN_(32000),
samples_in10MsAudio_(160), // Initiates to 16 kHz mode.
- audio_decoder_(NULL),
- decoder_initialized_(false) {}
+ decoder_initialized_(false) {
+}
ACMISAC::~ACMISAC() {
return;
@@ -261,81 +263,14 @@ static uint16_t ACMISACFixGetDecSampRate(ACM_ISAC_STRUCT* /* inst */) {
#endif
-// Decoder class to be injected into NetEq.
-class AcmAudioDecoderIsac : public AudioDecoder {
- public:
- AcmAudioDecoderIsac(int codec_id, void* state)
- : AudioDecoder(ACMCodecDB::neteq_decoders_[codec_id]) {
- state_ = state;
- }
-
- // ACMISAC is the owner of the object where |state_| is pointing to.
- // Therefore, it should not be deleted in this destructor.
- virtual ~AcmAudioDecoderIsac() {}
-
- virtual int Decode(const uint8_t* encoded, size_t encoded_len,
- int16_t* decoded, SpeechType* speech_type) {
- int16_t temp_type;
- int ret = ACM_ISAC_DECODE_B(static_cast<ACM_ISAC_STRUCT*>(state_),
- reinterpret_cast<const uint16_t*>(encoded),
- static_cast<int16_t>(encoded_len), decoded,
- &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
- }
-
- virtual bool HasDecodePlc() const { return true; }
-
- virtual int DecodePlc(int num_frames, int16_t* decoded) {
- return ACM_ISAC_DECODEPLC(static_cast<ACM_ISAC_STRUCT*>(state_),
- decoded, static_cast<int16_t>(num_frames));
- }
-
- virtual int Init() {
- return 0; // We expect that the initialized instance is injected in the
- // constructor.
- }
-
- virtual int IncomingPacket(const uint8_t* payload,
- size_t payload_len,
- uint16_t rtp_sequence_number,
- uint32_t rtp_timestamp,
- uint32_t arrival_timestamp) {
- return ACM_ISAC_DECODE_BWE(static_cast<ACM_ISAC_STRUCT*>(state_),
- reinterpret_cast<const uint16_t*>(payload),
- static_cast<uint32_t>(payload_len),
- rtp_sequence_number,
- rtp_timestamp,
- arrival_timestamp);
- }
-
- virtual int DecodeRedundant(const uint8_t* encoded,
- size_t encoded_len, int16_t* decoded,
- SpeechType* speech_type) {
- int16_t temp_type = 1; // Default is speech.
- int16_t ret = ACM_ISAC_DECODERCU(static_cast<ACM_ISAC_STRUCT*>(state_),
- reinterpret_cast<const uint16_t*>(encoded),
- static_cast<int16_t>(encoded_len), decoded,
- &temp_type);
- *speech_type = ConvertSpeechType(temp_type);
- return ret;
- }
-
- virtual int ErrorCode() {
- return ACM_ISAC_GETERRORCODE(static_cast<ACM_ISAC_STRUCT*>(state_));
- }
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AcmAudioDecoderIsac);
-};
-
ACMISAC::ACMISAC(int16_t codec_id)
- : is_enc_initialized_(false),
+ : AudioDecoder(ACMCodecDB::neteq_decoders_[codec_id]),
+ codec_inst_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ is_enc_initialized_(false),
isac_coding_mode_(CHANNEL_INDEPENDENT),
enforce_frame_size_(false),
isac_current_bn_(32000),
samples_in_10ms_audio_(160), // Initiates to 16 kHz mode.
- audio_decoder_(NULL),
decoder_initialized_(false) {
codec_id_ = codec_id;
@@ -345,14 +280,10 @@ ACMISAC::ACMISAC(int16_t codec_id)
return;
}
codec_inst_ptr_->inst = NULL;
+ state_ = codec_inst_ptr_;
}
ACMISAC::~ACMISAC() {
- if (audio_decoder_ != NULL) {
- delete audio_decoder_;
- audio_decoder_ = NULL;
- }
-
if (codec_inst_ptr_ != NULL) {
if (codec_inst_ptr_->inst != NULL) {
ACM_ISAC_FREE(codec_inst_ptr_->inst);
@@ -364,6 +295,34 @@ ACMISAC::~ACMISAC() {
return;
}
+int16_t ACMISAC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
+ // set decoder sampling frequency.
+ if (codec_params->codec_inst.plfreq == 32000 ||
+ codec_params->codec_inst.plfreq == 48000) {
+ UpdateDecoderSampFreq(ACMCodecDB::kISACSWB);
+ } else {
+ UpdateDecoderSampFreq(ACMCodecDB::kISAC);
+ }
+
+ // in a one-way communication we may never register send-codec.
+ // However we like that the BWE to work properly so it has to
+ // be initialized. The BWE is initialized when iSAC encoder is initialized.
+ // Therefore, we need this.
+ if (!encoder_initialized_) {
+ // Since we don't require a valid rate or a valid packet size when
+ // initializing the decoder, we set valid values before initializing encoder
+ codec_params->codec_inst.rate = kIsacWbDefaultRate;
+ codec_params->codec_inst.pacsize = kIsacPacSize960;
+ if (InternalInitEncoder(codec_params) < 0) {
+ return -1;
+ }
+ encoder_initialized_ = true;
+ }
+
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ return ACM_ISAC_DECODERINIT(codec_inst_ptr_->inst);
+}
+
ACMGenericCodec* ACMISAC::CreateInstance(void) { return NULL; }
int16_t ACMISAC::InternalEncode(uint8_t* bitstream,
@@ -375,6 +334,7 @@ int16_t ACMISAC::InternalEncode(uint8_t* bitstream,
// at the first 10ms pushed in to iSAC if the bit-rate is low, this is
// sort of a bug in iSAC. to address this we treat iSAC as the
// following.
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (codec_inst_ptr_ == NULL) {
return -1;
}
@@ -428,6 +388,7 @@ int16_t ACMISAC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
if (UpdateEncoderSampFreq((uint16_t)codec_params->codec_inst.plfreq) < 0) {
return -1;
}
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (ACM_ISAC_ENCODERINIT(codec_inst_ptr_->inst, isac_coding_mode_) < 0) {
return -1;
}
@@ -450,38 +411,8 @@ int16_t ACMISAC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
return 0;
}
-int16_t ACMISAC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
-
- // set decoder sampling frequency.
- if (codec_params->codec_inst.plfreq == 32000 ||
- codec_params->codec_inst.plfreq == 48000) {
- UpdateDecoderSampFreq(ACMCodecDB::kISACSWB);
- } else {
- UpdateDecoderSampFreq(ACMCodecDB::kISAC);
- }
-
- // in a one-way communication we may never register send-codec.
- // However we like that the BWE to work properly so it has to
- // be initialized. The BWE is initialized when iSAC encoder is initialized.
- // Therefore, we need this.
- if (!encoder_initialized_) {
- // Since we don't require a valid rate or a valid packet size when
- // initializing the decoder, we set valid values before initializing encoder
- codec_params->codec_inst.rate = kIsacWbDefaultRate;
- codec_params->codec_inst.pacsize = kIsacPacSize960;
- if (InternalInitEncoder(codec_params) < 0) {
- return -1;
- }
- encoder_initialized_ = true;
- }
-
- return ACM_ISAC_DECODERINIT(codec_inst_ptr_->inst);
-}
-
int16_t ACMISAC::InternalCreateEncoder() {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (codec_inst_ptr_ == NULL) {
return -1;
}
@@ -493,19 +424,6 @@ int16_t ACMISAC::InternalCreateEncoder() {
return status;
}
-void ACMISAC::DestructEncoderSafe() {
- // codec with shared instance cannot delete.
- encoder_initialized_ = false;
- return;
-}
-
-void ACMISAC::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- ACM_ISAC_FREE(static_cast<ACM_ISAC_STRUCT *>(ptr_inst));
- }
- return;
-}
-
int16_t ACMISAC::Transcode(uint8_t* bitstream,
int16_t* bitstream_len_byte,
int16_t q_bwe,
@@ -513,6 +431,7 @@ int16_t ACMISAC::Transcode(uint8_t* bitstream,
bool is_red) {
int16_t jitter_info = 0;
// transcode from a higher rate to lower rate sanity check
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (codec_inst_ptr_ == NULL) {
return -1;
}
@@ -530,7 +449,27 @@ int16_t ACMISAC::Transcode(uint8_t* bitstream,
}
}
+void ACMISAC::UpdateFrameLen() {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ frame_len_smpl_ = ACM_ISAC_GETNEWFRAMELEN(codec_inst_ptr_->inst);
+ encoder_params_.codec_inst.pacsize = frame_len_smpl_;
+}
+
+void ACMISAC::DestructEncoderSafe() {
+ // codec with shared instance cannot delete.
+ encoder_initialized_ = false;
+ return;
+}
+
+void ACMISAC::InternalDestructEncoderInst(void* ptr_inst) {
+ if (ptr_inst != NULL) {
+ ACM_ISAC_FREE(static_cast<ACM_ISAC_STRUCT *>(ptr_inst));
+ }
+ return;
+}
+
int16_t ACMISAC::SetBitRateSafe(int32_t bit_rate) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (codec_inst_ptr_ == NULL) {
return -1;
}
@@ -594,6 +533,7 @@ int32_t ACMISAC::GetEstimatedBandwidthSafe() {
int samp_rate;
// Get bandwidth information
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
ACM_ISAC_GETSENDBWE(codec_inst_ptr_->inst, &bandwidth_index, &delay_index);
// Validy check of index
@@ -615,6 +555,7 @@ int32_t ACMISAC::SetEstimatedBandwidthSafe(int32_t estimated_bandwidth) {
int16_t bandwidth_index;
// Check sample frequency and choose appropriate table
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
samp_rate = ACM_ISAC_GETENCSAMPRATE(codec_inst_ptr_->inst);
if (samp_rate == 16000) {
@@ -657,6 +598,7 @@ int32_t ACMISAC::GetRedPayloadSafe(
return -1;
#else
uint8_t* red_payload, int16_t* payload_bytes) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
int16_t bytes =
WebRtcIsac_GetRedPayload(
codec_inst_ptr_->inst, reinterpret_cast<int16_t*>(red_payload));
@@ -672,6 +614,7 @@ int16_t ACMISAC::UpdateDecoderSampFreq(
#ifdef WEBRTC_CODEC_ISAC
int16_t codec_id) {
// The decoder supports only wideband and super-wideband.
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (ACMCodecDB::kISAC == codec_id) {
return WebRtcIsac_SetDecSampRate(codec_inst_ptr_->inst, 16000);
} else if (ACMCodecDB::kISACSWB == codec_id ||
@@ -700,6 +643,7 @@ int16_t ACMISAC::UpdateEncoderSampFreq(
in_audio_ix_read_ = 0;
in_audio_ix_write_ = 0;
in_timestamp_ix_write_ = 0;
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
if (WebRtcIsac_SetEncSampRate(codec_inst_ptr_->inst,
encoder_samp_freq_hz) < 0) {
return -1;
@@ -718,6 +662,7 @@ int16_t ACMISAC::UpdateEncoderSampFreq(
}
int16_t ACMISAC::EncoderSampFreq(uint16_t* samp_freq_hz) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
*samp_freq_hz = ACM_ISAC_GETENCSAMPRATE(codec_inst_ptr_->inst);
return 0;
}
@@ -730,6 +675,7 @@ int32_t ACMISAC::ConfigISACBandwidthEstimator(
{
uint16_t samp_freq_hz;
EncoderSampFreq(&samp_freq_hz);
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
// TODO(turajs): at 32kHz we hardcode calling with 30ms and enforce
// the frame-size otherwise we might get error. Revise if
// control-bwe is changed.
@@ -749,26 +695,25 @@ int32_t ACMISAC::ConfigISACBandwidthEstimator(
return -1;
}
UpdateFrameLen();
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, &isac_current_bn_);
return 0;
}
int32_t ACMISAC::SetISACMaxPayloadSize(const uint16_t max_payload_len_bytes) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
return ACM_ISAC_SETMAXPAYLOADSIZE(codec_inst_ptr_->inst,
max_payload_len_bytes);
}
int32_t ACMISAC::SetISACMaxRate(const uint32_t max_rate_bit_per_sec) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
return ACM_ISAC_SETMAXRATE(codec_inst_ptr_->inst, max_rate_bit_per_sec);
}
-void ACMISAC::UpdateFrameLen() {
- frame_len_smpl_ = ACM_ISAC_GETNEWFRAMELEN(codec_inst_ptr_->inst);
- encoder_params_.codec_inst.pacsize = frame_len_smpl_;
-}
-
void ACMISAC::CurrentRate(int32_t* rate_bit_per_sec) {
if (isac_coding_mode_ == ADAPTIVE) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, rate_bit_per_sec);
}
}
@@ -784,12 +729,71 @@ int16_t ACMISAC::REDPayloadISAC(const int32_t isac_rate,
return status;
}
-AudioDecoder* ACMISAC::Decoder(int codec_id) {
- if (audio_decoder_)
- return audio_decoder_;
+int ACMISAC::Decode(const uint8_t* encoded,
+ size_t encoded_len,
+ int16_t* decoded,
+ SpeechType* speech_type) {
+ int16_t temp_type;
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ int ret =
+ ACM_ISAC_DECODE_B(static_cast<ACM_ISAC_STRUCT*>(codec_inst_ptr_->inst),
+ reinterpret_cast<const uint16_t*>(encoded),
+ static_cast<int16_t>(encoded_len),
+ decoded,
+ &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int ACMISAC::DecodePlc(int num_frames, int16_t* decoded) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ return ACM_ISAC_DECODEPLC(
+ static_cast<ACM_ISAC_STRUCT*>(codec_inst_ptr_->inst),
+ decoded,
+ static_cast<int16_t>(num_frames));
+}
+
+int ACMISAC::IncomingPacket(const uint8_t* payload,
+ size_t payload_len,
+ uint16_t rtp_sequence_number,
+ uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ return ACM_ISAC_DECODE_BWE(
+ static_cast<ACM_ISAC_STRUCT*>(codec_inst_ptr_->inst),
+ reinterpret_cast<const uint16_t*>(payload),
+ static_cast<uint32_t>(payload_len),
+ rtp_sequence_number,
+ rtp_timestamp,
+ arrival_timestamp);
+}
+
+int ACMISAC::DecodeRedundant(const uint8_t* encoded,
+ size_t encoded_len,
+ int16_t* decoded,
+ SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ int16_t ret =
+ ACM_ISAC_DECODERCU(static_cast<ACM_ISAC_STRUCT*>(codec_inst_ptr_->inst),
+ reinterpret_cast<const uint16_t*>(encoded),
+ static_cast<int16_t>(encoded_len),
+ decoded,
+ &temp_type);
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
+int ACMISAC::ErrorCode() {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
+ return ACM_ISAC_GETERRORCODE(
+ static_cast<ACM_ISAC_STRUCT*>(codec_inst_ptr_->inst));
+}
+AudioDecoder* ACMISAC::Decoder(int codec_id) {
// Create iSAC instance if it does not exist.
if (!encoder_exist_) {
+ CriticalSectionScoped lock(codec_inst_crit_sect_.get());
assert(codec_inst_ptr_->inst == NULL);
encoder_initialized_ = false;
decoder_initialized_ = false;
@@ -822,8 +826,7 @@ AudioDecoder* ACMISAC::Decoder(int codec_id) {
decoder_initialized_ = true;
}
- audio_decoder_ = new AcmAudioDecoderIsac(codec_id, codec_inst_ptr_->inst);
- return audio_decoder_;
+ return this;
}
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.h
index a3227d5d0b3..3249526f7d8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_isac.h
@@ -12,86 +12,119 @@
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_ISAC_H_
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
namespace webrtc {
+class CriticalSectionWrapper;
+
namespace acm2 {
struct ACMISACInst;
-class AcmAudioDecoderIsac;
enum IsacCodingMode {
ADAPTIVE,
CHANNEL_INDEPENDENT
};
-class ACMISAC : public ACMGenericCodec {
+class ACMISAC : public ACMGenericCodec, AudioDecoder {
public:
explicit ACMISAC(int16_t codec_id);
~ACMISAC();
- // for FEC
- ACMGenericCodec* CreateInstance(void);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams* codec_params);
- int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
+ // Methods below are inherited from ACMGenericCodec.
+ ACMGenericCodec* CreateInstance(void) OVERRIDE;
- int16_t InternalInitEncoder(WebRtcACMCodecParams* codec_params);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) OVERRIDE;
- int16_t InternalInitDecoder(WebRtcACMCodecParams* codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams* codec_params) OVERRIDE;
- int16_t UpdateDecoderSampFreq(int16_t codec_id);
+ int16_t UpdateDecoderSampFreq(int16_t codec_id) OVERRIDE;
- int16_t UpdateEncoderSampFreq(uint16_t samp_freq_hz);
+ int16_t UpdateEncoderSampFreq(uint16_t samp_freq_hz) OVERRIDE;
- int16_t EncoderSampFreq(uint16_t* samp_freq_hz);
+ int16_t EncoderSampFreq(uint16_t* samp_freq_hz) OVERRIDE;
int32_t ConfigISACBandwidthEstimator(const uint8_t init_frame_size_msec,
const uint16_t init_rate_bit_per_sec,
- const bool enforce_frame_size);
+ const bool enforce_frame_size) OVERRIDE;
- int32_t SetISACMaxPayloadSize(const uint16_t max_payload_len_bytes);
+ int32_t SetISACMaxPayloadSize(const uint16_t max_payload_len_bytes) OVERRIDE;
- int32_t SetISACMaxRate(const uint32_t max_rate_bit_per_sec);
+ int32_t SetISACMaxRate(const uint32_t max_rate_bit_per_sec) OVERRIDE;
int16_t REDPayloadISAC(const int32_t isac_rate,
const int16_t isac_bw_estimate,
uint8_t* payload,
- int16_t* payload_len_bytes);
+ int16_t* payload_len_bytes) OVERRIDE;
- protected:
- void DestructEncoderSafe();
+ // Methods below are inherited from AudioDecoder.
+ virtual int Decode(const uint8_t* encoded,
+ size_t encoded_len,
+ int16_t* decoded,
+ SpeechType* speech_type) OVERRIDE;
- int16_t SetBitRateSafe(const int32_t bit_rate);
+ virtual bool HasDecodePlc() const OVERRIDE { return true; }
- int32_t GetEstimatedBandwidthSafe();
+ virtual int DecodePlc(int num_frames, int16_t* decoded) OVERRIDE;
- int32_t SetEstimatedBandwidthSafe(int32_t estimated_bandwidth);
+ virtual int Init() OVERRIDE { return 0; }
- int32_t GetRedPayloadSafe(uint8_t* red_payload, int16_t* payload_bytes);
+ virtual int IncomingPacket(const uint8_t* payload,
+ size_t payload_len,
+ uint16_t rtp_sequence_number,
+ uint32_t rtp_timestamp,
+ uint32_t arrival_timestamp) OVERRIDE;
- int16_t InternalCreateEncoder();
+ virtual int DecodeRedundant(const uint8_t* encoded,
+ size_t encoded_len,
+ int16_t* decoded,
+ SpeechType* speech_type) OVERRIDE;
- void InternalDestructEncoderInst(void* ptr_inst);
+ virtual int ErrorCode() OVERRIDE;
+ protected:
int16_t Transcode(uint8_t* bitstream,
int16_t* bitstream_len_byte,
int16_t q_bwe,
int32_t rate,
bool is_red);
- void CurrentRate(int32_t* rate_bit_per_sec);
-
void UpdateFrameLen();
- virtual AudioDecoder* Decoder(int codec_id);
+ // Methods below are inherited from ACMGenericCodec.
+ void DestructEncoderSafe() OVERRIDE;
+
+ int16_t SetBitRateSafe(const int32_t bit_rate) OVERRIDE;
+
+ int32_t GetEstimatedBandwidthSafe() OVERRIDE;
+
+ int32_t SetEstimatedBandwidthSafe(int32_t estimated_bandwidth) OVERRIDE;
+
+ int32_t GetRedPayloadSafe(uint8_t* red_payload,
+ int16_t* payload_bytes) OVERRIDE;
+
+ int16_t InternalCreateEncoder() OVERRIDE;
+
+ void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
+
+ void CurrentRate(int32_t* rate_bit_per_sec) OVERRIDE;
+
+ virtual AudioDecoder* Decoder(int codec_id) OVERRIDE;
- ACMISACInst* codec_inst_ptr_;
+ // |codec_inst_crit_sect_| protects |codec_inst_ptr_|.
+ const scoped_ptr<CriticalSectionWrapper> codec_inst_crit_sect_;
+ ACMISACInst* codec_inst_ptr_ GUARDED_BY(codec_inst_crit_sect_);
bool is_enc_initialized_;
IsacCodingMode isac_coding_mode_;
bool enforce_frame_size_;
int32_t isac_current_bn_;
uint16_t samples_in_10ms_audio_;
- AcmAudioDecoderIsac* audio_decoder_;
bool decoder_initialized_;
};
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.cc
index c00a9203a9d..544c932f39f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.cc
@@ -75,6 +75,8 @@ ACMOpus::ACMOpus(int16_t codec_id)
// Opus has internal DTX, but we dont use it for now.
has_internal_dtx_ = false;
+ has_internal_fec_ = true;
+
if (codec_id_ != ACMCodecDB::kOpus) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"Wrong codec id for Opus.");
@@ -140,6 +142,20 @@ int16_t ACMOpus::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
// Store bitrate.
bitrate_ = codec_params->codec_inst.rate;
+ // TODO(tlegrand): Remove this code when we have proper APIs to set the
+ // complexity at a higher level.
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) || defined(WEBRTC_ARCH_ARM)
+ // If we are on Android, iOS and/or ARM, use a lower complexity setting as
+ // default, to save encoder complexity.
+ const int kOpusComplexity5 = 5;
+ WebRtcOpus_SetComplexity(encoder_inst_ptr_, kOpusComplexity5);
+ if (ret < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
+ "Setting complexity failed for Opus");
+ return ret;
+ }
+#endif
+
return 0;
}
@@ -184,6 +200,31 @@ int16_t ACMOpus::SetBitRateSafe(const int32_t rate) {
return -1;
}
+int ACMOpus::SetFEC(bool enable_fec) {
+ // Ask the encoder to enable FEC.
+ if (enable_fec) {
+ if (WebRtcOpus_EnableFec(encoder_inst_ptr_) == 0) {
+ fec_enabled_ = true;
+ return 0;
+ }
+ } else {
+ if (WebRtcOpus_DisableFec(encoder_inst_ptr_) == 0) {
+ fec_enabled_ = false;
+ return 0;
+ }
+ }
+ return -1;
+}
+
+int ACMOpus::SetPacketLossRate(int loss_rate) {
+ // Ask the encoder to change the target packet loss rate.
+ if (WebRtcOpus_SetPacketLossRate(encoder_inst_ptr_, loss_rate) == 0) {
+ packet_loss_rate_ = loss_rate;
+ return 0;
+ }
+ return -1;
+}
+
#endif // WEBRTC_CODEC_OPUS
} // namespace acm2
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.h
index a346e3c8ff3..07ce0721686 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_opus.h
@@ -32,6 +32,10 @@ class ACMOpus : public ACMGenericCodec {
int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ virtual int SetFEC(bool enable_fec) OVERRIDE;
+
+ virtual int SetPacketLossRate(int loss_rate) OVERRIDE;
+
protected:
void DestructEncoderSafe();
@@ -43,8 +47,11 @@ class ACMOpus : public ACMGenericCodec {
WebRtcOpusEncInst* encoder_inst_ptr_;
uint16_t sample_freq_;
- uint16_t bitrate_;
+ int32_t bitrate_;
int channels_;
+
+ bool fec_enabled_;
+ int packet_loss_rate_;
};
} // namespace acm2
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc
index ac92198f92b..cb7c4184079 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.cc
@@ -21,8 +21,9 @@
#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
#include "webrtc/modules/audio_coding/main/acm2/nack.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
@@ -35,7 +36,6 @@ namespace acm2 {
namespace {
-const int kNeteqInitSampleRateHz = 16000;
const int kNackThresholdPackets = 2;
// |vad_activity_| field of |audio_frame| is set to |previous_audio_activity_|
@@ -117,21 +117,23 @@ bool IsCng(int codec_id) {
} // namespace
-AcmReceiver::AcmReceiver()
- : id_(0),
- neteq_(NetEq::Create(kNeteqInitSampleRateHz)),
+AcmReceiver::AcmReceiver(const AudioCodingModule::Config& config)
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ id_(config.id),
last_audio_decoder_(-1), // Invalid value.
- decode_lock_(RWLockWrapper::CreateRWLock()),
- neteq_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vad_enabled_(true),
- previous_audio_activity_(AudioFrame::kVadUnknown),
- current_sample_rate_hz_(kNeteqInitSampleRateHz),
+ previous_audio_activity_(AudioFrame::kVadPassive),
+ current_sample_rate_hz_(config.neteq_config.sample_rate_hz),
nack_(),
nack_enabled_(false),
+ neteq_(NetEq::Create(config.neteq_config)),
+ decode_lock_(RWLockWrapper::CreateRWLock()),
+ vad_enabled_(true),
+ clock_(config.clock),
av_sync_(false),
initial_delay_manager_(),
missing_packets_sync_stream_(),
late_packets_sync_stream_() {
+ assert(clock_);
for (int n = 0; n < ACMCodecDB::kMaxNumCodecs; ++n) {
decoders_[n].registered = false;
}
@@ -148,7 +150,6 @@ AcmReceiver::AcmReceiver()
AcmReceiver::~AcmReceiver() {
delete neteq_;
delete decode_lock_;
- delete neteq_crit_sect_;
}
int AcmReceiver::SetMinimumDelay(int delay_ms) {
@@ -162,7 +163,7 @@ int AcmReceiver::SetInitialDelay(int delay_ms) {
if (delay_ms < 0 || delay_ms > 10000) {
return -1;
}
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (delay_ms == 0) {
av_sync_ = false;
@@ -206,7 +207,7 @@ int AcmReceiver::LeastRequiredDelayMs() const {
}
int AcmReceiver::current_sample_rate_hz() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
return current_sample_rate_hz_;
}
@@ -269,7 +270,7 @@ int AcmReceiver::InsertPacket(const WebRtcRTPHeader& rtp_header,
const RTPHeader* header = &rtp_header.header; // Just a shorthand.
{
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
int codec_id = RtpHeaderToCodecIndex(*header, incoming_payload);
if (codec_id < 0) {
@@ -328,7 +329,7 @@ int AcmReceiver::InsertPacket(const WebRtcRTPHeader& rtp_header,
rtp_header, receive_timestamp, packet_type, new_codec, sample_rate_hz,
missing_packets_sync_stream_.get());
}
- }
+ } // |crit_sect_| is released.
{
WriteLockScoped lock_codecs(*decode_lock_); // Lock to prevent an encoding.
@@ -359,7 +360,7 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
{
// Accessing members, take the lock.
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (av_sync_) {
assert(initial_delay_manager_.get());
@@ -404,7 +405,7 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
}
// Accessing members, take the lock.
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
// Update NACK.
int decoded_sequence_num = 0;
@@ -426,9 +427,13 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
if (ptr_audio_buffer == audio_buffer_) {
// Data is written to local buffer.
if (need_resampling) {
- samples_per_channel = resampler_.Resample10Msec(
- audio_buffer_, current_sample_rate_hz_, desired_freq_hz,
- num_channels, audio_frame->data_);
+ samples_per_channel =
+ resampler_.Resample10Msec(audio_buffer_,
+ current_sample_rate_hz_,
+ desired_freq_hz,
+ num_channels,
+ AudioFrame::kMaxDataSizeSamples,
+ audio_frame->data_);
if (samples_per_channel < 0) {
LOG_FERR0(LS_ERROR, "AcmReceiver::GetAudio") << "Resampler Failed.";
return -1;
@@ -442,9 +447,13 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
// Data is written into |audio_frame|.
if (need_resampling) {
// We might end up here ONLY if codec is changed.
- samples_per_channel = resampler_.Resample10Msec(
- audio_frame->data_, current_sample_rate_hz_, desired_freq_hz,
- num_channels, audio_buffer_);
+ samples_per_channel =
+ resampler_.Resample10Msec(audio_frame->data_,
+ current_sample_rate_hz_,
+ desired_freq_hz,
+ num_channels,
+ AudioFrame::kMaxDataSizeSamples,
+ audio_buffer_);
if (samples_per_channel < 0) {
LOG_FERR0(LS_ERROR, "AcmReceiver::GetAudio") << "Resampler Failed.";
return -1;
@@ -463,6 +472,19 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
SetAudioFrameActivityAndType(vad_enabled_, type, audio_frame);
previous_audio_activity_ = audio_frame->vad_activity_;
call_stats_.DecodedByNetEq(audio_frame->speech_type_);
+
+ // Computes the RTP timestamp of the first sample in |audio_frame| from
+ // |GetPlayoutTimestamp|, which is the timestamp of the last sample of
+ // |audio_frame|.
+ uint32_t playout_timestamp = 0;
+ if (GetPlayoutTimestamp(&playout_timestamp)) {
+ audio_frame->timestamp_ =
+ playout_timestamp - audio_frame->samples_per_channel_;
+ } else {
+ // Remain 0 until we have a valid |playout_timestamp|.
+ audio_frame->timestamp_ = 0;
+ }
+
return 0;
}
@@ -473,19 +495,25 @@ int32_t AcmReceiver::AddCodec(int acm_codec_id,
assert(acm_codec_id >= 0 && acm_codec_id < ACMCodecDB::kMaxNumCodecs);
NetEqDecoder neteq_decoder = ACMCodecDB::neteq_decoders_[acm_codec_id];
- CriticalSectionScoped lock(neteq_crit_sect_);
+ // Make sure the right decoder is registered for Opus.
+ if (neteq_decoder == kDecoderOpus && channels == 2) {
+ neteq_decoder = kDecoderOpus_2ch;
+ }
+
+ CriticalSectionScoped lock(crit_sect_.get());
// The corresponding NetEq decoder ID.
// If this coder has been registered before.
if (decoders_[acm_codec_id].registered) {
- if (decoders_[acm_codec_id].payload_type == payload_type) {
+ if (decoders_[acm_codec_id].payload_type == payload_type &&
+ decoders_[acm_codec_id].channels == channels) {
// Re-registering the same codec with the same payload-type. Do nothing
// and return.
return 0;
}
- // Changing the payload-type of this codec. First unregister. Then register
- // with new payload-type.
+ // Changing the payload-type or number of channels for this codec.
+ // First unregister. Then register with new payload-type/channels.
if (neteq_->RemovePayloadType(decoders_[acm_codec_id].payload_type) !=
NetEq::kOK) {
LOG_F(LS_ERROR) << "Cannot remover payload "
@@ -499,8 +527,7 @@ int32_t AcmReceiver::AddCodec(int acm_codec_id,
ret_val = neteq_->RegisterPayloadType(neteq_decoder, payload_type);
} else {
ret_val = neteq_->RegisterExternalDecoder(
- audio_decoder, neteq_decoder,
- ACMCodecDB::database_[acm_codec_id].plfreq, payload_type);
+ audio_decoder, neteq_decoder, payload_type);
}
if (ret_val != NetEq::kOK) {
LOG_FERR3(LS_ERROR, "AcmReceiver::AddCodec", acm_codec_id, payload_type,
@@ -519,13 +546,13 @@ int32_t AcmReceiver::AddCodec(int acm_codec_id,
void AcmReceiver::EnableVad() {
neteq_->EnableVad();
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
vad_enabled_ = true;
}
void AcmReceiver::DisableVad() {
neteq_->DisableVad();
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
vad_enabled_ = false;
}
@@ -537,7 +564,7 @@ void AcmReceiver::FlushBuffers() {
// many as it can.
int AcmReceiver::RemoveAllCodecs() {
int ret_val = 0;
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
for (int n = 0; n < ACMCodecDB::kMaxNumCodecs; ++n) {
if (decoders_[n].registered) {
if (neteq_->RemovePayloadType(decoders_[n].payload_type) == 0) {
@@ -557,15 +584,13 @@ int AcmReceiver::RemoveAllCodecs() {
int AcmReceiver::RemoveCodec(uint8_t payload_type) {
int codec_index = PayloadType2CodecIndex(payload_type);
if (codec_index < 0) { // Such a payload-type is not registered.
- LOG(LS_WARNING) << "payload_type " << payload_type << " is not registered,"
- " no action is taken.";
return 0;
}
if (neteq_->RemovePayloadType(payload_type) != NetEq::kOK) {
LOG_FERR1(LS_ERROR, "AcmReceiver::RemoveCodec", payload_type);
return -1;
}
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
decoders_[codec_index].registered = false;
if (last_audio_decoder_ == codec_index)
last_audio_decoder_ = -1; // Codec is removed, invalidate last decoder.
@@ -573,26 +598,27 @@ int AcmReceiver::RemoveCodec(uint8_t payload_type) {
}
void AcmReceiver::set_id(int id) {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
id_ = id;
}
-uint32_t AcmReceiver::PlayoutTimestamp() {
+bool AcmReceiver::GetPlayoutTimestamp(uint32_t* timestamp) {
if (av_sync_) {
assert(initial_delay_manager_.get());
- if (initial_delay_manager_->buffering())
- return initial_delay_manager_->playout_timestamp();
+ if (initial_delay_manager_->buffering()) {
+ return initial_delay_manager_->GetPlayoutTimestamp(timestamp);
+ }
}
- return neteq_->PlayoutTimestamp();
+ return neteq_->GetPlayoutTimestamp(timestamp);
}
int AcmReceiver::last_audio_codec_id() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
return last_audio_decoder_;
}
int AcmReceiver::last_audio_payload_type() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (last_audio_decoder_ < 0)
return -1;
assert(decoders_[last_audio_decoder_].registered);
@@ -600,7 +626,7 @@ int AcmReceiver::last_audio_payload_type() const {
}
int AcmReceiver::RedPayloadType() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (ACMCodecDB::kRED < 0 ||
!decoders_[ACMCodecDB::kRED].registered) {
LOG_F(LS_WARNING) << "RED is not registered.";
@@ -610,9 +636,8 @@ int AcmReceiver::RedPayloadType() const {
}
int AcmReceiver::LastAudioCodec(CodecInst* codec) const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (last_audio_decoder_ < 0) {
- LOG_F(LS_WARNING) << "No audio payload is received, yet.";
return -1;
}
assert(decoders_[last_audio_decoder_].registered);
@@ -636,6 +661,7 @@ void AcmReceiver::NetworkStatistics(ACMNetworkStatistics* acm_stat) {
acm_stat->currentPreemptiveRate = neteq_stat.preemptive_rate;
acm_stat->currentAccelerateRate = neteq_stat.accelerate_rate;
acm_stat->clockDriftPPM = neteq_stat.clockdrift_ppm;
+ acm_stat->addedSamples = neteq_stat.added_zero_samples;
std::vector<int> waiting_times;
neteq_->WaitingTimes(&waiting_times);
@@ -665,7 +691,7 @@ void AcmReceiver::NetworkStatistics(ACMNetworkStatistics* acm_stat) {
int AcmReceiver::DecoderByPayloadType(uint8_t payload_type,
CodecInst* codec) const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
int codec_index = PayloadType2CodecIndex(payload_type);
if (codec_index < 0) {
LOG_FERR1(LS_ERROR, "AcmReceiver::DecoderByPayloadType", payload_type);
@@ -691,7 +717,7 @@ int AcmReceiver::EnableNack(size_t max_nack_list_size) {
if (max_nack_list_size == 0 || max_nack_list_size > Nack::kNackListSizeLimit)
return -1;
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (!nack_enabled_) {
nack_.reset(Nack::Create(kNackThresholdPackets));
nack_enabled_ = true;
@@ -707,14 +733,14 @@ int AcmReceiver::EnableNack(size_t max_nack_list_size) {
}
void AcmReceiver::DisableNack() {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
nack_.reset(); // Memory is released.
nack_enabled_ = false;
}
std::vector<uint16_t> AcmReceiver::GetNackList(
int round_trip_time_ms) const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
if (round_trip_time_ms < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
"GetNackList: round trip time cannot be negative."
@@ -730,7 +756,7 @@ std::vector<uint16_t> AcmReceiver::GetNackList(
void AcmReceiver::ResetInitialDelay() {
{
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
av_sync_ = false;
initial_delay_manager_.reset(NULL);
missing_packets_sync_stream_.reset(NULL);
@@ -752,13 +778,9 @@ bool AcmReceiver::GetSilence(int desired_sample_rate_hz, AudioFrame* frame) {
// exceeds a threshold.
int num_packets;
int max_num_packets;
- int buffer_size_byte;
- int max_buffer_size_byte;
const float kBufferingThresholdScale = 0.9f;
- neteq_->PacketBufferStatistics(&num_packets, &max_num_packets,
- &buffer_size_byte, &max_buffer_size_byte);
- if (num_packets > max_num_packets * kBufferingThresholdScale ||
- buffer_size_byte > max_buffer_size_byte * kBufferingThresholdScale) {
+ neteq_->PacketBufferStatistics(&num_packets, &max_num_packets);
+ if (num_packets > max_num_packets * kBufferingThresholdScale) {
initial_delay_manager_->DisableBuffering();
return false;
}
@@ -771,7 +793,6 @@ bool AcmReceiver::GetSilence(int desired_sample_rate_hz, AudioFrame* frame) {
current_sample_rate_hz_ = ACMCodecDB::database_[last_audio_decoder_].plfreq;
frame->num_channels_ = decoders_[last_audio_decoder_].channels;
} else {
- current_sample_rate_hz_ = kNeteqInitSampleRateHz;
frame->num_channels_ = 1;
}
@@ -785,7 +806,6 @@ bool AcmReceiver::GetSilence(int desired_sample_rate_hz, AudioFrame* frame) {
frame->samples_per_channel_ = frame->sample_rate_hz_ / 100; // Always 10 ms.
frame->speech_type_ = AudioFrame::kCNG;
frame->vad_activity_ = AudioFrame::kVadPassive;
- frame->energy_ = 0;
int samples = frame->samples_per_channel_ * frame->num_channels_;
memset(frame->data_, 0, samples * sizeof(int16_t));
return true;
@@ -815,7 +835,7 @@ uint32_t AcmReceiver::NowInTimestamp(int decoder_sampling_rate) const {
// We masked 6 most significant bits of 32-bit so there is no overflow in
// the conversion from milliseconds to timestamp.
const uint32_t now_in_ms = static_cast<uint32_t>(
- TickTime::MillisecondTimestamp() & 0x03ffffff);
+ clock_->TimeInMilliseconds() & 0x03ffffff);
return static_cast<uint32_t>(
(decoder_sampling_rate / 1000) * now_in_ms);
}
@@ -839,7 +859,7 @@ void AcmReceiver::InsertStreamOfSyncPackets(
void AcmReceiver::GetDecodingCallStatistics(
AudioDecodingCallStats* stats) const {
- CriticalSectionScoped lock(neteq_crit_sect_);
+ CriticalSectionScoped lock(crit_sect_.get());
*stats = call_stats_.GetDecodingStatistics();
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.h
index 81eb5206b8a..b6898f73f9a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver.h
@@ -20,9 +20,10 @@
#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
#include "webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -47,9 +48,7 @@ class AcmReceiver {
};
// Constructor of the class
- AcmReceiver();
-
- explicit AcmReceiver(NetEq* neteq);
+ explicit AcmReceiver(const AudioCodingModule::Config& config);
// Destructor of the class.
~AcmReceiver();
@@ -244,9 +243,10 @@ class AcmReceiver {
void set_id(int id); // TODO(turajs): can be inline.
//
- // Returns the RTP timestamp of the last sample delivered by GetAudio().
+ // Gets the RTP timestamp of the last sample delivered by GetAudio().
+ // Returns true if the RTP timestamp is valid, otherwise false.
//
- uint32_t PlayoutTimestamp();
+ bool GetPlayoutTimestamp(uint32_t* timestamp);
//
// Return the index of the codec associated with the last non-CNG/non-DTMF
@@ -328,7 +328,8 @@ class AcmReceiver {
private:
int PayloadType2CodecIndex(uint8_t payload_type) const;
- bool GetSilence(int desired_sample_rate_hz, AudioFrame* frame);
+ bool GetSilence(int desired_sample_rate_hz, AudioFrame* frame)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
int GetNumSyncPacketToInsert(uint16_t received_squence_number);
@@ -339,20 +340,23 @@ class AcmReceiver {
void InsertStreamOfSyncPackets(InitialDelayManager::SyncStream* sync_stream);
- int id_;
+ scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ int id_; // TODO(henrik.lundin) Make const.
+ int last_audio_decoder_ GUARDED_BY(crit_sect_);
+ AudioFrame::VADActivity previous_audio_activity_ GUARDED_BY(crit_sect_);
+ int current_sample_rate_hz_ GUARDED_BY(crit_sect_);
+ ACMResampler resampler_ GUARDED_BY(crit_sect_);
+ // Used in GetAudio, declared as member to avoid allocating every 10ms.
+ // TODO(henrik.lundin) Stack-allocate in GetAudio instead?
+ int16_t audio_buffer_[AudioFrame::kMaxDataSizeSamples] GUARDED_BY(crit_sect_);
+ scoped_ptr<Nack> nack_ GUARDED_BY(crit_sect_);
+ bool nack_enabled_ GUARDED_BY(crit_sect_);
+ CallStatistics call_stats_ GUARDED_BY(crit_sect_);
NetEq* neteq_;
Decoder decoders_[ACMCodecDB::kMaxNumCodecs];
- int last_audio_decoder_;
RWLockWrapper* decode_lock_;
- CriticalSectionWrapper* neteq_crit_sect_;
bool vad_enabled_;
- AudioFrame::VADActivity previous_audio_activity_;
- int current_sample_rate_hz_;
- ACMResampler resampler_;
- // Used in GetAudio, declared as member to avoid allocating every 10ms.
- int16_t audio_buffer_[AudioFrame::kMaxDataSizeSamples];
- scoped_ptr<Nack> nack_;
- bool nack_enabled_;
+ Clock* clock_; // TODO(henrik.lundin) Make const if possible.
// Indicates if a non-zero initial delay is set, and the receiver is in
// AV-sync mode.
@@ -366,8 +370,6 @@ class AcmReceiver {
// initial delay is set.
scoped_ptr<InitialDelayManager::SyncStream> missing_packets_sync_stream_;
scoped_ptr<InitialDelayManager::SyncStream> late_packets_sync_stream_;
-
- CallStatistics call_stats_;
};
} // namespace acm2
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc
index 712eeb26877..4234f146474 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_receiver_unittest.cc
@@ -16,7 +16,8 @@
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
#include "webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
+#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/test_suite.h"
#include "webrtc/test/testsupport/fileutils.h"
@@ -42,12 +43,14 @@ class AcmReceiverTest : public AudioPacketizationCallback,
public ::testing::Test {
protected:
AcmReceiverTest()
- : receiver_(new AcmReceiver),
- acm_(new AudioCodingModuleImpl(0)),
- timestamp_(0),
+ : timestamp_(0),
packet_sent_(false),
last_packet_send_timestamp_(timestamp_),
- last_frame_type_(kFrameEmpty) {}
+ last_frame_type_(kFrameEmpty) {
+ AudioCodingModule::Config config;
+ acm_.reset(new AudioCodingModuleImpl(config));
+ receiver_.reset(new AcmReceiver(config));
+ }
~AcmReceiverTest() {}
@@ -302,55 +305,6 @@ TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(PostdecodingVad)) {
EXPECT_EQ(AudioFrame::kVadUnknown, frame.vad_activity_);
}
-TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(FlushBuffer)) {
- const int id = ACMCodecDB::kISAC;
- EXPECT_EQ(0, receiver_->AddCodec(id, codecs_[id].pltype, codecs_[id].channels,
- NULL));
- const int kNumPackets = 5;
- const int num_10ms_frames = codecs_[id].pacsize / (codecs_[id].plfreq / 100);
- for (int n = 0; n < kNumPackets; ++n)
- InsertOnePacketOfSilence(id);
- ACMNetworkStatistics statistics;
- receiver_->NetworkStatistics(&statistics);
- ASSERT_EQ(num_10ms_frames * kNumPackets * 10, statistics.currentBufferSize);
-
- receiver_->FlushBuffers();
- receiver_->NetworkStatistics(&statistics);
- ASSERT_EQ(0, statistics.currentBufferSize);
-}
-
-TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(PlayoutTimestamp)) {
- const int id = ACMCodecDB::kPCM16Bwb;
- EXPECT_EQ(0, receiver_->AddCodec(id, codecs_[id].pltype, codecs_[id].channels,
- NULL));
- receiver_->SetPlayoutMode(fax);
- const int kNumPackets = 5;
- const int num_10ms_frames = codecs_[id].pacsize / (codecs_[id].plfreq / 100);
- uint32_t expected_timestamp;
- AudioFrame frame;
- int ts_offset = 0;
- bool first_audio_frame = true;
- for (int n = 0; n < kNumPackets; ++n) {
- packet_sent_ = false;
- InsertOnePacketOfSilence(id);
- ASSERT_TRUE(packet_sent_);
- expected_timestamp = last_packet_send_timestamp_;
- for (int k = 0; k < num_10ms_frames; ++k) {
- ASSERT_EQ(0, receiver_->GetAudio(codecs_[id].plfreq, &frame));
- if (first_audio_frame) {
- // There is an offset in playout timestamps. Perhaps, it is related to
- // initial delay that NetEq applies
- ts_offset = receiver_->PlayoutTimestamp() - expected_timestamp;
- first_audio_frame = false;
- } else {
- EXPECT_EQ(expected_timestamp + ts_offset,
- receiver_->PlayoutTimestamp());
- }
- expected_timestamp += codecs_[id].plfreq / 100; // Increment by 10 ms.
- }
- }
-}
-
TEST_F(AcmReceiverTest, DISABLED_ON_ANDROID(LastAudioCodec)) {
const int kCodecId[] = {
ACMCodecDB::kISAC, ACMCodecDB::kPCMA, ACMCodecDB::kISACSWB,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc
index 3abe4f1ec46..97d87b1b3a4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.cc
@@ -10,61 +10,59 @@
#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
+#include <assert.h>
#include <string.h>
#include "webrtc/common_audio/resampler/include/resampler.h"
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
-
namespace acm2 {
-ACMResampler::ACMResampler()
- : resampler_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {
+ACMResampler::ACMResampler() {
}
ACMResampler::~ACMResampler() {
- delete resampler_crit_sect_;
}
int ACMResampler::Resample10Msec(const int16_t* in_audio,
int in_freq_hz,
int out_freq_hz,
int num_audio_channels,
+ int out_capacity_samples,
int16_t* out_audio) {
- CriticalSectionScoped cs(resampler_crit_sect_);
-
+ int in_length = in_freq_hz * num_audio_channels / 100;
+ int out_length = out_freq_hz * num_audio_channels / 100;
if (in_freq_hz == out_freq_hz) {
- size_t length = static_cast<size_t>(in_freq_hz * num_audio_channels / 100);
- memcpy(out_audio, in_audio, length * sizeof(int16_t));
- return static_cast<int16_t>(in_freq_hz / 100);
+ if (out_capacity_samples < in_length) {
+ assert(false);
+ return -1;
+ }
+ memcpy(out_audio, in_audio, in_length * sizeof(int16_t));
+ return in_length / num_audio_channels;
}
- // |maxLen| is maximum number of samples for 10ms at 48kHz.
- int max_len = 480 * num_audio_channels;
- int length_in = (in_freq_hz / 100) * num_audio_channels;
- int out_len;
-
- ResamplerType type = (num_audio_channels == 1) ? kResamplerSynchronous :
- kResamplerSynchronousStereo;
-
- if (resampler_.ResetIfNeeded(in_freq_hz, out_freq_hz, type) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, 0,
- "Error in reset of resampler");
+ if (resampler_.InitializeIfNeeded(in_freq_hz, out_freq_hz,
+ num_audio_channels) != 0) {
+ LOG_FERR3(LS_ERROR, InitializeIfNeeded, in_freq_hz, out_freq_hz,
+ num_audio_channels);
return -1;
}
- if (resampler_.Push(in_audio, length_in, out_audio, max_len, out_len) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, 0,
- "Error in resampler: resampler.Push");
+ out_length =
+ resampler_.Resample(in_audio, in_length, out_audio, out_capacity_samples);
+ if (out_length == -1) {
+ LOG_FERR4(LS_ERROR,
+ Resample,
+ in_audio,
+ in_length,
+ out_audio,
+ out_capacity_samples);
return -1;
}
- return out_len / num_audio_channels;
+ return out_length / num_audio_channels;
}
} // namespace acm2
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.h
index e992955f5f3..a8fc6b6f26a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/acm_resampler.h
@@ -11,13 +11,10 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RESAMPLER_H_
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RESAMPLER_H_
-#include "webrtc/common_audio/resampler/include/resampler.h"
+#include "webrtc/common_audio/resampler/include/push_resampler.h"
#include "webrtc/typedefs.h"
namespace webrtc {
-
-class CriticalSectionWrapper;
-
namespace acm2 {
class ACMResampler {
@@ -29,16 +26,14 @@ class ACMResampler {
int in_freq_hz,
int out_freq_hz,
int num_audio_channels,
+ int out_capacity_samples,
int16_t* out_audio);
private:
- // Use the Resampler class.
- Resampler resampler_;
- CriticalSectionWrapper* resampler_crit_sect_;
+ PushResampler<int16_t> resampler_;
};
} // namespace acm2
-
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_RESAMPLER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc
index 60ed69cb29c..eca909cc49c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.cc
@@ -13,22 +13,21 @@
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
#include "webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h"
-#include "webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
-const char kLegacyAcmVersion[] = "acm1";
-const char kExperimentalAcmVersion[] = "acm2";
-
// Create module
AudioCodingModule* AudioCodingModule::Create(int id) {
- return new acm1::AudioCodingModuleImpl(id, Clock::GetRealTimeClock());
+ return Create(id, Clock::GetRealTimeClock());
}
AudioCodingModule* AudioCodingModule::Create(int id, Clock* clock) {
- return new acm1::AudioCodingModuleImpl(id, clock);
+ AudioCodingModule::Config config;
+ config.id = id;
+ config.clock = clock;
+ return new acm2::AudioCodingModuleImpl(config);
}
// Get number of supported codecs
@@ -95,13 +94,4 @@ bool AudioCodingModule::IsCodecValid(const CodecInst& codec) {
}
}
-AudioCodingModule* AudioCodingModuleFactory::Create(int id) const {
- return new acm1::AudioCodingModuleImpl(static_cast<int32_t>(id),
- Clock::GetRealTimeClock());
-}
-
-AudioCodingModule* NewAudioCodingModuleFactory::Create(int id) const {
- return new acm2::AudioCodingModuleImpl(id);
-}
-
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi
index f51c3bf7d74..90dad6c55c0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module.gypi
@@ -7,16 +7,36 @@
# be found in the AUTHORS file in the root of the source tree.
{
+ 'variables': {
+ 'audio_coding_dependencies': [
+ 'CNG',
+ 'G711',
+ 'G722',
+ 'iLBC',
+ 'iSAC',
+ 'iSACFix',
+ 'PCM16B',
+ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ ],
+ 'audio_coding_defines': [],
+ 'conditions': [
+ ['include_opus==1', {
+ 'audio_coding_dependencies': ['webrtc_opus',],
+ 'audio_coding_defines': ['WEBRTC_CODEC_OPUS',],
+ }],
+ ],
+ },
'targets': [
{
- 'target_name': 'acm2',
+ 'target_name': 'audio_coding_module',
'type': 'static_library',
'defines': [
'<@(audio_coding_defines)',
],
'dependencies': [
'<@(audio_coding_dependencies)',
- 'NetEq4',
+ 'neteq',
],
'include_dirs': [
'../interface',
@@ -93,4 +113,45 @@
],
},
],
+ 'conditions': [
+ ['include_tests==1', {
+ 'targets': [
+ {
+ 'target_name': 'delay_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'audio_coding_module',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/test/test.gyp:test_support',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ ],
+ 'sources': [
+ '../test/delay_test.cc',
+ '../test/Channel.cc',
+ '../test/PCMFile.cc',
+ '../test/utility.cc',
+ ],
+ }, # delay_test
+ {
+ 'target_name': 'insert_packet_with_timing',
+ 'type': 'executable',
+ 'dependencies': [
+ 'audio_coding_module',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/test/test.gyp:test_support',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ ],
+ 'sources': [
+ '../test/insert_packet_with_timing.cc',
+ '../test/Channel.cc',
+ '../test/PCMFile.cc',
+ ],
+ }, # delay_test
+ ],
+ }],
+ ],
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc
index 4c64e07dd5c..a07e8543347 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.cc
@@ -39,11 +39,11 @@ enum {
kMaxPacketSize = 2560
};
-// Maximum number of payloads that can be packed in one RED payload. For
-// regular FEC, we only pack two payloads. In case of dual-streaming, in worst
-// case we might pack 3 payloads in one RED payload.
+// Maximum number of payloads that can be packed in one RED packet. For
+// regular RED, we only pack two payloads. In case of dual-streaming, in worst
+// case we might pack 3 payloads in one RED packet.
enum {
- kNumFecFragmentationVectors = 2,
+ kNumRedFragmentationVectors = 2,
kMaxNumFragmentationVectors = 3
};
@@ -114,9 +114,10 @@ static int TimestampLessThan(uint32_t t1, uint32_t t2) {
} // namespace
-AudioCodingModuleImpl::AudioCodingModuleImpl(int id)
- : packetization_callback_(NULL),
- id_(id),
+AudioCodingModuleImpl::AudioCodingModuleImpl(
+ const AudioCodingModule::Config& config)
+ : acm_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ id_(config.id),
expected_codec_ts_(0xD87F3F9F),
expected_in_ts_(0xD87F3F9F),
send_codec_inst_(),
@@ -131,18 +132,20 @@ AudioCodingModuleImpl::AudioCodingModuleImpl(int id)
stereo_send_(false),
current_send_codec_idx_(-1),
send_codec_registered_(false),
- acm_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vad_callback_(NULL),
+ receiver_(config),
is_first_red_(true),
- fec_enabled_(false),
- last_fec_timestamp_(0),
+ red_enabled_(false),
+ last_red_timestamp_(0),
+ codec_fec_enabled_(false),
previous_pltype_(255),
aux_rtp_header_(NULL),
receiver_initialized_(false),
- callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
secondary_send_codec_inst_(),
codec_timestamp_(expected_codec_ts_),
- first_10ms_data_(false) {
+ first_10ms_data_(false),
+ callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ packetization_callback_(NULL),
+ vad_callback_(NULL) {
// Nullify send codec memory, set payload type and set codec name to
// invalid values.
@@ -159,8 +162,6 @@ AudioCodingModuleImpl::AudioCodingModuleImpl(int id)
mirror_codec_idx_[i] = -1;
}
- receiver_.set_id(id_);
-
// Allocate memory for RED.
red_buffer_ = new uint8_t[MAX_PAYLOAD_SIZE_BYTE];
@@ -201,7 +202,7 @@ AudioCodingModuleImpl::AudioCodingModuleImpl(int id)
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"Cannot initialize receiver");
}
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id, "Created");
+ WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id_, "Created");
}
AudioCodingModuleImpl::~AudioCodingModuleImpl() {
@@ -349,7 +350,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
int16_t len_bytes = MAX_PAYLOAD_SIZE_BYTE;
WebRtcACMEncodingType encoding_type;
if (secondary_encoder_->Encode(red_buffer_, &len_bytes,
- &last_fec_timestamp_,
+ &last_red_timestamp_,
&encoding_type) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"ProcessDual(): Encoding of secondary encoder Failed");
@@ -372,7 +373,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
index_primary = secondary_ready_to_encode ?
TimestampLessThan(primary_timestamp, secondary_timestamp) : 0;
index_primary += has_previous_payload ?
- TimestampLessThan(primary_timestamp, last_fec_timestamp_) : 0;
+ TimestampLessThan(primary_timestamp, last_red_timestamp_) : 0;
}
if (secondary_ready_to_encode) {
@@ -384,7 +385,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
if (has_previous_payload) {
index_previous_secondary = primary_ready_to_encode ?
- (1 - TimestampLessThan(primary_timestamp, last_fec_timestamp_)) : 0;
+ (1 - TimestampLessThan(primary_timestamp, last_red_timestamp_)) : 0;
// If secondary is ready it always have a timestamp larger than previous
// secondary. So the index is either 0 or 1.
index_previous_secondary += secondary_ready_to_encode ? 1 : 0;
@@ -405,7 +406,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
} else if (index_secondary == 0) {
current_timestamp = secondary_timestamp;
} else {
- current_timestamp = last_fec_timestamp_;
+ current_timestamp = last_red_timestamp_;
}
fragmentation_.fragmentationVectorSize = 0;
@@ -420,7 +421,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
fragmentation_.fragmentationPlType[index_previous_secondary] =
secondary_send_codec_inst_.pltype;
fragmentation_.fragmentationTimeDiff[index_previous_secondary] =
- static_cast<uint16_t>(current_timestamp - last_fec_timestamp_);
+ static_cast<uint16_t>(current_timestamp - last_red_timestamp_);
fragmentation_.fragmentationVectorSize++;
}
@@ -462,7 +463,7 @@ int AudioCodingModuleImpl::ProcessDualStream() {
{
CriticalSectionScoped lock(callback_crit_sect_);
if (packetization_callback_ != NULL) {
- // Callback with payload data, including redundant data (FEC/RED).
+ // Callback with payload data, including redundant data (RED).
if (packetization_callback_->SendData(kAudioFrameSpeech,
my_red_payload_type,
current_timestamp, stream,
@@ -495,7 +496,7 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
FrameType frame_type = kAudioFrameSpeech;
uint8_t current_payload_type = 0;
bool has_data_to_send = false;
- bool fec_active = false;
+ bool red_active = false;
RTPFragmentationHeader my_fragmentation;
// Keep the scope of the ACM critical section limited.
@@ -562,15 +563,15 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
// Redundancy encode is done here. The two bitstreams packetized into
// one RTP packet and the fragmentation points are set.
// Only apply RED on speech data.
- if ((fec_enabled_) &&
+ if ((red_enabled_) &&
((encoding_type == kActiveNormalEncoded) ||
(encoding_type == kPassiveNormalEncoded))) {
- // FEC is enabled within this scope.
+ // RED is enabled within this scope.
//
// Note that, a special solution exists for iSAC since it is the only
// codec for which GetRedPayload has a non-empty implementation.
//
- // Summary of the FEC scheme below (use iSAC as example):
+ // Summary of the RED scheme below (use iSAC as example):
//
// 1st (is_first_red_ is true) encoded iSAC frame (primary #1) =>
// - call GetRedPayload() and store redundancy for packet #1 in
@@ -581,7 +582,7 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
// - store primary #2 in 1st fragment of RED buffer and send the
// combined packet
// - the transmitted packet contains primary #2 (new) and
- // reduncancy for packet #1 (old)
+ // redundancy for packet #1 (old)
// - call GetRed_Payload() and store redundancy for packet #2 in
// second fragment of RED buffer
//
@@ -604,19 +605,19 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
//
// Hence, even if every second packet is dropped, perfect
// reconstruction is possible.
- fec_active = true;
+ red_active = true;
has_data_to_send = false;
// Skip the following part for the first packet in a RED session.
if (!is_first_red_) {
- // Rearrange stream such that FEC packets are included.
+ // Rearrange stream such that RED packets are included.
// Replace stream now that we have stored current stream.
memcpy(stream + fragmentation_.fragmentationOffset[1], red_buffer_,
fragmentation_.fragmentationLength[1]);
// Update the fragmentation time difference vector, in number of
// timestamps.
uint16_t time_since_last = static_cast<uint16_t>(
- rtp_timestamp - last_fec_timestamp_);
+ rtp_timestamp - last_red_timestamp_);
// Update fragmentation vectors.
fragmentation_.fragmentationPlType[1] =
@@ -630,7 +631,7 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
// Insert new packet payload type.
fragmentation_.fragmentationPlType[0] = current_payload_type;
- last_fec_timestamp_ = rtp_timestamp;
+ last_red_timestamp_ = rtp_timestamp;
// Can be modified by the GetRedPayload() call if iSAC is utilized.
red_length_bytes = length_bytes;
@@ -650,7 +651,7 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
if (codecs_[current_send_codec_idx_]->GetRedPayload(
red_buffer_, &red_length_bytes) == -1) {
// The codec was not iSAC => use current encoder output as redundant
- // data instead (trivial FEC scheme).
+ // data instead (trivial RED scheme).
memcpy(red_buffer_, stream, red_length_bytes);
}
@@ -658,7 +659,7 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
// Update payload type with RED payload type.
current_payload_type = red_pltype_;
// We have packed 2 payloads.
- fragmentation_.fragmentationVectorSize = kNumFecFragmentationVectors;
+ fragmentation_.fragmentationVectorSize = kNumRedFragmentationVectors;
// Copy to local variable, as it will be used outside ACM lock.
my_fragmentation.CopyFrom(fragmentation_);
@@ -672,8 +673,8 @@ int AudioCodingModuleImpl::ProcessSingleStream() {
CriticalSectionScoped lock(callback_crit_sect_);
if (packetization_callback_ != NULL) {
- if (fec_active) {
- // Callback with payload data, including redundant data (FEC/RED).
+ if (red_active) {
+ // Callback with payload data, including redundant data (RED).
packetization_callback_->SendData(frame_type, current_payload_type,
rtp_timestamp, stream, length_bytes,
&my_fragmentation);
@@ -713,14 +714,14 @@ int AudioCodingModuleImpl::InitializeSender() {
}
}
- // Initialize FEC/RED.
+ // Initialize RED.
is_first_red_ = true;
- if (fec_enabled_ || secondary_encoder_.get() != NULL) {
+ if (red_enabled_ || secondary_encoder_.get() != NULL) {
if (red_buffer_ != NULL) {
memset(red_buffer_, 0, MAX_PAYLOAD_SIZE_BYTE);
}
- if (fec_enabled_) {
- ResetFragmentation(kNumFecFragmentationVectors);
+ if (red_enabled_) {
+ ResetFragmentation(kNumRedFragmentationVectors);
} else {
ResetFragmentation(0);
}
@@ -1031,10 +1032,20 @@ int AudioCodingModuleImpl::RegisterSendCodec(const CodecInst& send_codec) {
// Everything is fine so we can replace the previous codec with this one.
if (send_codec_registered_) {
- // If we change codec we start fresh with FEC.
+ // If we change codec we start fresh with RED.
// This is not strictly required by the standard.
is_first_red_ = true;
codec_ptr->SetVAD(&dtx_enabled_, &vad_enabled_, &vad_mode_);
+
+ if (!codec_ptr->HasInternalFEC()) {
+ codec_fec_enabled_ = false;
+ } else {
+ if (codec_ptr->SetFEC(codec_fec_enabled_) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Cannot set codec FEC");
+ return -1;
+ }
+ }
}
current_send_codec_idx_ = codec_id;
@@ -1120,8 +1131,18 @@ int AudioCodingModuleImpl::RegisterSendCodec(const CodecInst& send_codec) {
}
send_codec_inst_.rate = send_codec.rate;
}
- previous_pltype_ = send_codec_inst_.pltype;
+ if (!codecs_[codec_id]->HasInternalFEC()) {
+ codec_fec_enabled_ = false;
+ } else {
+ if (codecs_[codec_id]->SetFEC(codec_fec_enabled_) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Cannot set codec FEC");
+ return -1;
+ }
+ }
+
+ previous_pltype_ = send_codec_inst_.pltype;
return 0;
}
}
@@ -1205,11 +1226,7 @@ int AudioCodingModuleImpl::Add10MsData(
return -1;
}
- // Allow for 8, 16, 32 and 48kHz input audio.
- if ((audio_frame.sample_rate_hz_ != 8000)
- && (audio_frame.sample_rate_hz_ != 16000)
- && (audio_frame.sample_rate_hz_ != 32000)
- && (audio_frame.sample_rate_hz_ != 48000)) {
+ if (audio_frame.sample_rate_hz_ > 48000) {
assert(false);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"Cannot Add 10 ms audio, input frequency not valid");
@@ -1365,13 +1382,17 @@ int AudioCodingModuleImpl::PreprocessToAddData(const AudioFrame& in_frame,
// The result of the resampler is written to output frame.
dest_ptr_audio = preprocess_frame_.data_;
- preprocess_frame_.samples_per_channel_ = resampler_.Resample10Msec(
- src_ptr_audio, in_frame.sample_rate_hz_, send_codec_inst_.plfreq,
- preprocess_frame_.num_channels_, dest_ptr_audio);
+ preprocess_frame_.samples_per_channel_ =
+ resampler_.Resample10Msec(src_ptr_audio,
+ in_frame.sample_rate_hz_,
+ send_codec_inst_.plfreq,
+ preprocess_frame_.num_channels_,
+ AudioFrame::kMaxDataSizeSamples,
+ dest_ptr_audio);
if (preprocess_frame_.samples_per_channel_ < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot add 10 ms audio, resmapling failed");
+ "Cannot add 10 ms audio, resampling failed");
return -1;
}
preprocess_frame_.sample_rate_hz_ = send_codec_inst_.plfreq;
@@ -1384,42 +1405,87 @@ int AudioCodingModuleImpl::PreprocessToAddData(const AudioFrame& in_frame,
}
/////////////////////////////////////////
-// (FEC) Forward Error Correction
+// (RED) Redundant Coding
//
-bool AudioCodingModuleImpl::FECStatus() const {
+bool AudioCodingModuleImpl::REDStatus() const {
CriticalSectionScoped lock(acm_crit_sect_);
- return fec_enabled_;
+
+ return red_enabled_;
}
-// Configure FEC status i.e on/off.
-int AudioCodingModuleImpl::SetFECStatus(
+// Configure RED status i.e. on/off.
+int AudioCodingModuleImpl::SetREDStatus(
#ifdef WEBRTC_CODEC_RED
- bool enable_fec) {
+ bool enable_red) {
CriticalSectionScoped lock(acm_crit_sect_);
- if (fec_enabled_ != enable_fec) {
+ if (enable_red == true && codec_fec_enabled_ == true) {
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
+ "Codec internal FEC and RED cannot be co-enabled.");
+ return -1;
+ }
+
+ if (red_enabled_ != enable_red) {
// Reset the RED buffer.
memset(red_buffer_, 0, MAX_PAYLOAD_SIZE_BYTE);
// Reset fragmentation buffers.
- ResetFragmentation(kNumFecFragmentationVectors);
- // Set fec_enabled_.
- fec_enabled_ = enable_fec;
+ ResetFragmentation(kNumRedFragmentationVectors);
+ // Set red_enabled_.
+ red_enabled_ = enable_red;
}
- is_first_red_ = true; // Make sure we restart FEC.
+ is_first_red_ = true; // Make sure we restart RED.
return 0;
#else
- bool /* enable_fec */) {
- fec_enabled_ = false;
+ bool /* enable_red */) {
+ red_enabled_ = false;
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- " WEBRTC_CODEC_RED is undefined => fec_enabled_ = %d",
- fec_enabled_);
+ " WEBRTC_CODEC_RED is undefined => red_enabled_ = %d",
+ red_enabled_);
return -1;
#endif
}
/////////////////////////////////////////
+// (FEC) Forward Error Correction (codec internal)
+//
+
+bool AudioCodingModuleImpl::CodecFEC() const {
+ return codec_fec_enabled_;
+}
+
+int AudioCodingModuleImpl::SetCodecFEC(bool enable_codec_fec) {
+ CriticalSectionScoped lock(acm_crit_sect_);
+
+ if (enable_codec_fec == true && red_enabled_ == true) {
+ WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
+ "Codec internal FEC and RED cannot be co-enabled.");
+ return -1;
+ }
+
+ // Set codec FEC.
+ if (HaveValidEncoder("SetCodecFEC") &&
+ codecs_[current_send_codec_idx_]->SetFEC(enable_codec_fec) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Set codec internal FEC failed.");
+ return -1;
+ }
+ codec_fec_enabled_ = enable_codec_fec;
+ return 0;
+}
+
+int AudioCodingModuleImpl::SetPacketLossRate(int loss_rate) {
+ if (HaveValidEncoder("SetPacketLossRate") &&
+ codecs_[current_send_codec_idx_]->SetPacketLossRate(loss_rate) < 0) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
+ "Set packet loss rate failed.");
+ return -1;
+ }
+ return 0;
+}
+
+/////////////////////////////////////////
// (VAD) Voice Activity Detection
//
int AudioCodingModuleImpl::SetVAD(bool enable_dtx,
@@ -1710,8 +1776,6 @@ int AudioCodingModuleImpl::PlayoutData10Ms(int desired_freq_hz,
}
audio_frame->id_ = id_;
- audio_frame->energy_ = 0;
- audio_frame->timestamp_ = 0;
return 0;
}
@@ -1770,6 +1834,7 @@ int AudioCodingModuleImpl::IncomingPayload(const uint8_t* incoming_payload,
aux_rtp_header_->type.Audio.channel = 1;
}
+ aux_rtp_header_->header.timestamp = timestamp;
IncomingPacket(incoming_payload, payload_length, *aux_rtp_header_);
// Get ready for the next payload.
aux_rtp_header_->header.sequenceNumber++;
@@ -1851,8 +1916,7 @@ int AudioCodingModuleImpl::ConfigISACBandwidthEstimator(
}
int AudioCodingModuleImpl::PlayoutTimestamp(uint32_t* timestamp) {
- *timestamp = receiver_.PlayoutTimestamp();
- return 0;
+ return receiver_.GetPlayoutTimestamp(timestamp) ? 0 : -1;
}
bool AudioCodingModuleImpl::HaveValidEncoder(const char* caller_name) const {
@@ -1976,10 +2040,6 @@ int AudioCodingModuleImpl::LeastRequiredDelayMs() const {
return receiver_.LeastRequiredDelayMs();
}
-const char* AudioCodingModuleImpl::Version() const {
- return kExperimentalAcmVersion;
-}
-
void AudioCodingModuleImpl::GetDecodingCallStatistics(
AudioDecodingCallStats* call_stats) const {
receiver_.GetDecodingCallStatistics(call_stats);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h
index bc4ea0f7a66..e54202bf6da 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_impl.h
@@ -19,11 +19,11 @@
#include "webrtc/modules/audio_coding/main/acm2/acm_receiver.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_resampler.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
namespace webrtc {
class CriticalSectionWrapper;
-class RWLockWrapper;
namespace acm2 {
@@ -32,11 +32,9 @@ class ACMGenericCodec;
class AudioCodingModuleImpl : public AudioCodingModule {
public:
- explicit AudioCodingModuleImpl(int id);
+ explicit AudioCodingModuleImpl(const AudioCodingModule::Config& config);
~AudioCodingModuleImpl();
- virtual const char* Version() const;
-
// Change the unique identifier of this object.
virtual int32_t ChangeUniqueId(const int32_t id);
@@ -94,14 +92,27 @@ class AudioCodingModuleImpl : public AudioCodingModule {
int Add10MsData(const AudioFrame& audio_frame);
/////////////////////////////////////////
- // (FEC) Forward Error Correction
+ // (RED) Redundant Coding
+ //
+
+ // Configure RED status i.e. on/off.
+ int SetREDStatus(bool enable_red);
+
+ // Get RED status.
+ bool REDStatus() const;
+
+ /////////////////////////////////////////
+ // (FEC) Forward Error Correction (codec internal)
//
- // Configure FEC status i.e on/off.
- int SetFECStatus(bool enable_fec);
+ // Configure FEC status i.e. on/off.
+ int SetCodecFEC(bool enabled_codec_fec);
// Get FEC status.
- bool FECStatus() const;
+ bool CodecFEC() const;
+
+ // Set target packet loss rate.
+ int SetPacketLossRate(int loss_rate);
/////////////////////////////////////////
// (VAD) Voice Activity Detection
@@ -235,13 +246,14 @@ class AudioCodingModuleImpl : public AudioCodingModule {
ACMGenericCodec* CreateCodec(const CodecInst& codec);
- int InitializeReceiverSafe();
+ int InitializeReceiverSafe() EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
bool HaveValidEncoder(const char* caller_name) const;
// Set VAD/DTX status. This function does not acquire a lock, and it is
// created to be called only from inside a critical section.
- int SetVADSafe(bool enable_dtx, bool enable_vad, ACMVADMode mode);
+ int SetVADSafe(bool enable_dtx, bool enable_vad, ACMVADMode mode)
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
// Process buffered audio when dual-streaming is not enabled (When RED is
// enabled still this function is used.)
@@ -263,18 +275,22 @@ class AudioCodingModuleImpl : public AudioCodingModule {
// -1: if encountering an error.
// 0: otherwise.
int PreprocessToAddData(const AudioFrame& in_frame,
- const AudioFrame** ptr_out);
+ const AudioFrame** ptr_out)
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
// Change required states after starting to receive the codec corresponding
// to |index|.
int UpdateUponReceivingCodec(int index);
- int EncodeFragmentation(int fragmentation_index, int payload_type,
+ int EncodeFragmentation(int fragmentation_index,
+ int payload_type,
uint32_t current_timestamp,
ACMGenericCodec* encoder,
- uint8_t* stream);
+ uint8_t* stream)
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
- void ResetFragmentation(int vector_size);
+ void ResetFragmentation(int vector_size)
+ EXCLUSIVE_LOCKS_REQUIRED(acm_crit_sect_);
// Get a pointer to AudioDecoder of the given codec. For some codecs, e.g.
// iSAC, encoding and decoding have to be performed on a shared
@@ -289,50 +305,50 @@ class AudioCodingModuleImpl : public AudioCodingModule {
int GetAudioDecoder(const CodecInst& codec, int codec_id,
int mirror_id, AudioDecoder** decoder);
- AudioPacketizationCallback* packetization_callback_;
-
- int id_;
- uint32_t expected_codec_ts_;
- uint32_t expected_in_ts_;
- CodecInst send_codec_inst_;
-
- uint8_t cng_nb_pltype_;
- uint8_t cng_wb_pltype_;
- uint8_t cng_swb_pltype_;
- uint8_t cng_fb_pltype_;
-
- uint8_t red_pltype_;
- bool vad_enabled_;
- bool dtx_enabled_;
- ACMVADMode vad_mode_;
+ CriticalSectionWrapper* acm_crit_sect_;
+ int id_; // TODO(henrik.lundin) Make const.
+ uint32_t expected_codec_ts_ GUARDED_BY(acm_crit_sect_);
+ uint32_t expected_in_ts_ GUARDED_BY(acm_crit_sect_);
+ CodecInst send_codec_inst_ GUARDED_BY(acm_crit_sect_);
+
+ uint8_t cng_nb_pltype_ GUARDED_BY(acm_crit_sect_);
+ uint8_t cng_wb_pltype_ GUARDED_BY(acm_crit_sect_);
+ uint8_t cng_swb_pltype_ GUARDED_BY(acm_crit_sect_);
+ uint8_t cng_fb_pltype_ GUARDED_BY(acm_crit_sect_);
+
+ uint8_t red_pltype_ GUARDED_BY(acm_crit_sect_);
+ bool vad_enabled_ GUARDED_BY(acm_crit_sect_);
+ bool dtx_enabled_ GUARDED_BY(acm_crit_sect_);
+ ACMVADMode vad_mode_ GUARDED_BY(acm_crit_sect_);
ACMGenericCodec* codecs_[ACMCodecDB::kMaxNumCodecs];
int mirror_codec_idx_[ACMCodecDB::kMaxNumCodecs];
- bool stereo_send_;
+ bool stereo_send_ GUARDED_BY(acm_crit_sect_);
int current_send_codec_idx_;
bool send_codec_registered_;
- ACMResampler resampler_;
+ ACMResampler resampler_ GUARDED_BY(acm_crit_sect_);
AcmReceiver receiver_;
- CriticalSectionWrapper* acm_crit_sect_;
- ACMVADCallback* vad_callback_;
- // RED/FEC.
- bool is_first_red_;
- bool fec_enabled_;
+ // RED.
+ bool is_first_red_ GUARDED_BY(acm_crit_sect_);
+ bool red_enabled_ GUARDED_BY(acm_crit_sect_);
// TODO(turajs): |red_buffer_| is allocated in constructor, why having them
// as pointers and not an array. If concerned about the memory, then make a
// set-up function to allocate them only when they are going to be used, i.e.
- // FEC or Dual-streaming is enabled.
- uint8_t* red_buffer_;
+ // RED or Dual-streaming is enabled.
+ uint8_t* red_buffer_ GUARDED_BY(acm_crit_sect_);
// TODO(turajs): we actually don't need |fragmentation_| as a member variable.
// It is sufficient to keep the length & payload type of previous payload in
// member variables.
- RTPFragmentationHeader fragmentation_;
- uint32_t last_fec_timestamp_;
+ RTPFragmentationHeader fragmentation_ GUARDED_BY(acm_crit_sect_);
+ uint32_t last_red_timestamp_ GUARDED_BY(acm_crit_sect_);
+
+ // Codec internal FEC
+ bool codec_fec_enabled_;
// This is to keep track of CN instances where we can send DTMFs.
- uint8_t previous_pltype_;
+ uint8_t previous_pltype_ GUARDED_BY(acm_crit_sect_);
// Used when payloads are pushed into ACM without any RTP info
// One example is when pre-encoded bit-stream is pushed from
@@ -342,15 +358,18 @@ class AudioCodingModuleImpl : public AudioCodingModule {
// be used in other methods, locks need to be taken.
WebRtcRTPHeader* aux_rtp_header_;
- bool receiver_initialized_;
+ bool receiver_initialized_ GUARDED_BY(acm_crit_sect_);
- CriticalSectionWrapper* callback_crit_sect_;
+ AudioFrame preprocess_frame_ GUARDED_BY(acm_crit_sect_);
+ CodecInst secondary_send_codec_inst_ GUARDED_BY(acm_crit_sect_);
+ scoped_ptr<ACMGenericCodec> secondary_encoder_ GUARDED_BY(acm_crit_sect_);
+ uint32_t codec_timestamp_ GUARDED_BY(acm_crit_sect_);
+ bool first_10ms_data_ GUARDED_BY(acm_crit_sect_);
- AudioFrame preprocess_frame_;
- CodecInst secondary_send_codec_inst_;
- scoped_ptr<ACMGenericCodec> secondary_encoder_;
- uint32_t codec_timestamp_;
- bool first_10ms_data_;
+ CriticalSectionWrapper* callback_crit_sect_;
+ AudioPacketizationCallback* packetization_callback_
+ GUARDED_BY(callback_crit_sect_);
+ ACMVADCallback* vad_callback_ GUARDED_BY(callback_crit_sect_);
};
} // namespace acm2
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest.cc
new file mode 100644
index 00000000000..37cd70e5e84
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/audio_coding_module_unittest.cc
@@ -0,0 +1,514 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
+#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/compile_assert.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/sleep.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
+#include "webrtc/system_wrappers/interface/thread_wrapper.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
+
+namespace webrtc {
+
+const int kSampleRateHz = 16000;
+const int kNumSamples10ms = kSampleRateHz / 100;
+const int kFrameSizeMs = 10; // Multiple of 10.
+const int kFrameSizeSamples = kFrameSizeMs / 10 * kNumSamples10ms;
+const int kPayloadSizeBytes = kFrameSizeSamples * sizeof(int16_t);
+const uint8_t kPayloadType = 111;
+
+class RtpUtility {
+ public:
+ RtpUtility(int samples_per_packet, uint8_t payload_type)
+ : samples_per_packet_(samples_per_packet), payload_type_(payload_type) {}
+
+ virtual ~RtpUtility() {}
+
+ void Populate(WebRtcRTPHeader* rtp_header) {
+ rtp_header->header.sequenceNumber = 0xABCD;
+ rtp_header->header.timestamp = 0xABCDEF01;
+ rtp_header->header.payloadType = payload_type_;
+ rtp_header->header.markerBit = false;
+ rtp_header->header.ssrc = 0x1234;
+ rtp_header->header.numCSRCs = 0;
+ rtp_header->frameType = kAudioFrameSpeech;
+
+ rtp_header->header.payload_type_frequency = kSampleRateHz;
+ rtp_header->type.Audio.channel = 1;
+ rtp_header->type.Audio.isCNG = false;
+ }
+
+ void Forward(WebRtcRTPHeader* rtp_header) {
+ ++rtp_header->header.sequenceNumber;
+ rtp_header->header.timestamp += samples_per_packet_;
+ }
+
+ private:
+ int samples_per_packet_;
+ uint8_t payload_type_;
+};
+
+class PacketizationCallbackStub : public AudioPacketizationCallback {
+ public:
+ PacketizationCallbackStub()
+ : num_calls_(0),
+ crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {}
+
+ virtual int32_t SendData(
+ FrameType frame_type,
+ uint8_t payload_type,
+ uint32_t timestamp,
+ const uint8_t* payload_data,
+ uint16_t payload_len_bytes,
+ const RTPFragmentationHeader* fragmentation) OVERRIDE {
+ CriticalSectionScoped lock(crit_sect_.get());
+ ++num_calls_;
+ last_payload_vec_.assign(payload_data, payload_data + payload_len_bytes);
+ return 0;
+ }
+
+ int num_calls() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return num_calls_;
+ }
+
+ int last_payload_len_bytes() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return last_payload_vec_.size();
+ }
+
+ void SwapBuffers(std::vector<uint8_t>* payload) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ last_payload_vec_.swap(*payload);
+ }
+
+ private:
+ int num_calls_ GUARDED_BY(crit_sect_);
+ std::vector<uint8_t> last_payload_vec_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<CriticalSectionWrapper> crit_sect_;
+};
+
+class AudioCodingModuleTest : public ::testing::Test {
+ protected:
+ AudioCodingModuleTest()
+ : id_(1),
+ rtp_utility_(new RtpUtility(kFrameSizeSamples, kPayloadType)),
+ clock_(Clock::GetRealTimeClock()) {}
+
+ ~AudioCodingModuleTest() {}
+
+ void TearDown() {}
+
+ void SetUp() {
+ acm_.reset(AudioCodingModule::Create(id_, clock_));
+
+ RegisterCodec();
+
+ rtp_utility_->Populate(&rtp_header_);
+
+ input_frame_.sample_rate_hz_ = kSampleRateHz;
+ input_frame_.num_channels_ = 1;
+ input_frame_.samples_per_channel_ = kSampleRateHz * 10 / 1000; // 10 ms.
+ COMPILE_ASSERT(kSampleRateHz * 10 / 1000 <= AudioFrame::kMaxDataSizeSamples,
+ audio_frame_too_small);
+ memset(input_frame_.data_,
+ 0,
+ input_frame_.samples_per_channel_ * sizeof(input_frame_.data_[0]));
+
+ ASSERT_EQ(0, acm_->RegisterTransportCallback(&packet_cb_));
+ }
+
+ virtual void RegisterCodec() {
+ AudioCodingModule::Codec("L16", &codec_, kSampleRateHz, 1);
+ codec_.pltype = kPayloadType;
+
+ // Register L16 codec in ACM.
+ ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec_));
+ ASSERT_EQ(0, acm_->RegisterSendCodec(codec_));
+ }
+
+ virtual void InsertPacketAndPullAudio() {
+ InsertPacket();
+ PullAudio();
+ }
+
+ virtual void InsertPacket() {
+ const uint8_t kPayload[kPayloadSizeBytes] = {0};
+ ASSERT_EQ(0,
+ acm_->IncomingPacket(kPayload, kPayloadSizeBytes, rtp_header_));
+ rtp_utility_->Forward(&rtp_header_);
+ }
+
+ virtual void PullAudio() {
+ AudioFrame audio_frame;
+ ASSERT_EQ(0, acm_->PlayoutData10Ms(-1, &audio_frame));
+ }
+
+ virtual void InsertAudio() {
+ ASSERT_EQ(0, acm_->Add10MsData(input_frame_));
+ input_frame_.timestamp_ += kNumSamples10ms;
+ }
+
+ virtual void Encode() {
+ int32_t encoded_bytes = acm_->Process();
+ // Expect to get one packet with two bytes per sample, or no packet at all,
+ // depending on how many 10 ms blocks go into |codec_.pacsize|.
+ EXPECT_TRUE(encoded_bytes == 2 * codec_.pacsize || encoded_bytes == 0);
+ }
+
+ const int id_;
+ scoped_ptr<RtpUtility> rtp_utility_;
+ scoped_ptr<AudioCodingModule> acm_;
+ PacketizationCallbackStub packet_cb_;
+ WebRtcRTPHeader rtp_header_;
+ AudioFrame input_frame_;
+ CodecInst codec_;
+ Clock* clock_;
+};
+
+// Check if the statistics are initialized correctly. Before any call to ACM
+// all fields have to be zero.
+TEST_F(AudioCodingModuleTest, DISABLED_ON_ANDROID(InitializedToZero)) {
+ AudioDecodingCallStats stats;
+ acm_->GetDecodingCallStatistics(&stats);
+ EXPECT_EQ(0, stats.calls_to_neteq);
+ EXPECT_EQ(0, stats.calls_to_silence_generator);
+ EXPECT_EQ(0, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(0, stats.decoded_plc);
+ EXPECT_EQ(0, stats.decoded_plc_cng);
+}
+
+// Apply an initial playout delay. Calls to AudioCodingModule::PlayoutData10ms()
+// should result in generating silence, check the associated field.
+TEST_F(AudioCodingModuleTest, DISABLED_ON_ANDROID(SilenceGeneratorCalled)) {
+ AudioDecodingCallStats stats;
+ const int kInitialDelay = 100;
+
+ acm_->SetInitialPlayoutDelay(kInitialDelay);
+
+ int num_calls = 0;
+ for (int time_ms = 0; time_ms < kInitialDelay;
+ time_ms += kFrameSizeMs, ++num_calls) {
+ InsertPacketAndPullAudio();
+ }
+ acm_->GetDecodingCallStatistics(&stats);
+ EXPECT_EQ(0, stats.calls_to_neteq);
+ EXPECT_EQ(num_calls, stats.calls_to_silence_generator);
+ EXPECT_EQ(0, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(0, stats.decoded_plc);
+ EXPECT_EQ(0, stats.decoded_plc_cng);
+}
+
+// Insert some packets and pull audio. Check statistics are valid. Then,
+// simulate packet loss and check if PLC and PLC-to-CNG statistics are
+// correctly updated.
+TEST_F(AudioCodingModuleTest, DISABLED_ON_ANDROID(NetEqCalls)) {
+ AudioDecodingCallStats stats;
+ const int kNumNormalCalls = 10;
+
+ for (int num_calls = 0; num_calls < kNumNormalCalls; ++num_calls) {
+ InsertPacketAndPullAudio();
+ }
+ acm_->GetDecodingCallStatistics(&stats);
+ EXPECT_EQ(kNumNormalCalls, stats.calls_to_neteq);
+ EXPECT_EQ(0, stats.calls_to_silence_generator);
+ EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(0, stats.decoded_plc);
+ EXPECT_EQ(0, stats.decoded_plc_cng);
+
+ const int kNumPlc = 3;
+ const int kNumPlcCng = 5;
+
+ // Simulate packet-loss. NetEq first performs PLC then PLC fades to CNG.
+ for (int n = 0; n < kNumPlc + kNumPlcCng; ++n) {
+ PullAudio();
+ }
+ acm_->GetDecodingCallStatistics(&stats);
+ EXPECT_EQ(kNumNormalCalls + kNumPlc + kNumPlcCng, stats.calls_to_neteq);
+ EXPECT_EQ(0, stats.calls_to_silence_generator);
+ EXPECT_EQ(kNumNormalCalls, stats.decoded_normal);
+ EXPECT_EQ(0, stats.decoded_cng);
+ EXPECT_EQ(kNumPlc, stats.decoded_plc);
+ EXPECT_EQ(kNumPlcCng, stats.decoded_plc_cng);
+}
+
+TEST_F(AudioCodingModuleTest, VerifyOutputFrame) {
+ AudioFrame audio_frame;
+ const int kSampleRateHz = 32000;
+ EXPECT_EQ(0, acm_->PlayoutData10Ms(kSampleRateHz, &audio_frame));
+ EXPECT_EQ(id_, audio_frame.id_);
+ EXPECT_EQ(0u, audio_frame.timestamp_);
+ EXPECT_GT(audio_frame.num_channels_, 0);
+ EXPECT_EQ(kSampleRateHz / 100, audio_frame.samples_per_channel_);
+ EXPECT_EQ(kSampleRateHz, audio_frame.sample_rate_hz_);
+}
+
+TEST_F(AudioCodingModuleTest, FailOnZeroDesiredFrequency) {
+ AudioFrame audio_frame;
+ EXPECT_EQ(-1, acm_->PlayoutData10Ms(0, &audio_frame));
+}
+
+// A multi-threaded test for ACM. This base class is using the PCM16b 16 kHz
+// codec, while the derive class AcmIsacMtTest is using iSAC.
+class AudioCodingModuleMtTest : public AudioCodingModuleTest {
+ protected:
+ static const int kNumPackets = 500;
+ static const int kNumPullCalls = 500;
+
+ AudioCodingModuleMtTest()
+ : AudioCodingModuleTest(),
+ send_thread_(ThreadWrapper::CreateThread(CbSendThread,
+ this,
+ kRealtimePriority,
+ "send")),
+ insert_packet_thread_(ThreadWrapper::CreateThread(CbInsertPacketThread,
+ this,
+ kRealtimePriority,
+ "insert_packet")),
+ pull_audio_thread_(ThreadWrapper::CreateThread(CbPullAudioThread,
+ this,
+ kRealtimePriority,
+ "pull_audio")),
+ test_complete_(EventWrapper::Create()),
+ send_count_(0),
+ insert_packet_count_(0),
+ pull_audio_count_(0),
+ crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ next_insert_packet_time_ms_(0),
+ fake_clock_(new SimulatedClock(0)) {
+ clock_ = fake_clock_.get();
+ }
+
+ void SetUp() {
+ AudioCodingModuleTest::SetUp();
+ StartThreads();
+ }
+
+ void StartThreads() {
+ unsigned int thread_id = 0;
+ ASSERT_TRUE(send_thread_->Start(thread_id));
+ ASSERT_TRUE(insert_packet_thread_->Start(thread_id));
+ ASSERT_TRUE(pull_audio_thread_->Start(thread_id));
+ }
+
+ void TearDown() {
+ AudioCodingModuleTest::TearDown();
+ pull_audio_thread_->Stop();
+ send_thread_->Stop();
+ insert_packet_thread_->Stop();
+ }
+
+ EventTypeWrapper RunTest() {
+ return test_complete_->Wait(10 * 60 * 1000); // 10 minutes' timeout.
+ }
+
+ virtual bool TestDone() {
+ if (packet_cb_.num_calls() > kNumPackets) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (pull_audio_count_ > kNumPullCalls) {
+ // Both conditions for completion are met. End the test.
+ return true;
+ }
+ }
+ return false;
+ }
+
+ static bool CbSendThread(void* context) {
+ return reinterpret_cast<AudioCodingModuleMtTest*>(context)->CbSendImpl();
+ }
+
+ // The send thread doesn't have to care about the current simulated time,
+ // since only the AcmReceiver is using the clock.
+ bool CbSendImpl() {
+ SleepMs(1);
+ if (HasFatalFailure()) {
+ // End the test early if a fatal failure (ASSERT_*) has occurred.
+ test_complete_->Set();
+ }
+ ++send_count_;
+ InsertAudio();
+ Encode();
+ if (TestDone()) {
+ test_complete_->Set();
+ }
+ return true;
+ }
+
+ static bool CbInsertPacketThread(void* context) {
+ return reinterpret_cast<AudioCodingModuleMtTest*>(context)
+ ->CbInsertPacketImpl();
+ }
+
+ bool CbInsertPacketImpl() {
+ SleepMs(1);
+ {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (clock_->TimeInMilliseconds() < next_insert_packet_time_ms_) {
+ return true;
+ }
+ next_insert_packet_time_ms_ += 10;
+ }
+ // Now we're not holding the crit sect when calling ACM.
+ ++insert_packet_count_;
+ InsertPacket();
+ return true;
+ }
+
+ static bool CbPullAudioThread(void* context) {
+ return reinterpret_cast<AudioCodingModuleMtTest*>(context)
+ ->CbPullAudioImpl();
+ }
+
+ bool CbPullAudioImpl() {
+ SleepMs(1);
+ {
+ CriticalSectionScoped lock(crit_sect_.get());
+ // Don't let the insert thread fall behind.
+ if (next_insert_packet_time_ms_ < clock_->TimeInMilliseconds()) {
+ return true;
+ }
+ ++pull_audio_count_;
+ }
+ // Now we're not holding the crit sect when calling ACM.
+ PullAudio();
+ fake_clock_->AdvanceTimeMilliseconds(10);
+ return true;
+ }
+
+ scoped_ptr<ThreadWrapper> send_thread_;
+ scoped_ptr<ThreadWrapper> insert_packet_thread_;
+ scoped_ptr<ThreadWrapper> pull_audio_thread_;
+ const scoped_ptr<EventWrapper> test_complete_;
+ int send_count_;
+ int insert_packet_count_;
+ int pull_audio_count_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ int64_t next_insert_packet_time_ms_ GUARDED_BY(crit_sect_);
+ scoped_ptr<SimulatedClock> fake_clock_;
+};
+
+TEST_F(AudioCodingModuleMtTest, DoTest) {
+ EXPECT_EQ(kEventSignaled, RunTest());
+}
+
+// This is a multi-threaded ACM test using iSAC. The test encodes audio
+// from a PCM file. The most recent encoded frame is used as input to the
+// receiving part. Depending on timing, it may happen that the same RTP packet
+// is inserted into the receiver multiple times, but this is a valid use-case,
+// and simplifies the test code a lot.
+class AcmIsacMtTest : public AudioCodingModuleMtTest {
+ protected:
+ static const int kNumPackets = 500;
+ static const int kNumPullCalls = 500;
+
+ AcmIsacMtTest()
+ : AudioCodingModuleMtTest(),
+ last_packet_number_(0) {}
+
+ ~AcmIsacMtTest() {}
+
+ void SetUp() {
+ AudioCodingModuleTest::SetUp();
+
+ // Set up input audio source to read from specified file, loop after 5
+ // seconds, and deliver blocks of 10 ms.
+ const std::string input_file_name =
+ webrtc::test::ResourcePath("audio_coding/speech_mono_16kHz", "pcm");
+ audio_loop_.Init(input_file_name, 5 * kSampleRateHz, kNumSamples10ms);
+
+ // Generate one packet to have something to insert.
+ int loop_counter = 0;
+ while (packet_cb_.last_payload_len_bytes() == 0) {
+ InsertAudio();
+ Encode();
+ ASSERT_LT(loop_counter++, 10);
+ }
+ // Set |last_packet_number_| to one less than |num_calls| so that the packet
+ // will be fetched in the next InsertPacket() call.
+ last_packet_number_ = packet_cb_.num_calls() - 1;
+
+ StartThreads();
+ }
+
+ virtual void RegisterCodec() {
+ COMPILE_ASSERT(kSampleRateHz == 16000, test_designed_for_isac_16khz);
+ AudioCodingModule::Codec("ISAC", &codec_, kSampleRateHz, 1);
+ codec_.pltype = kPayloadType;
+
+ // Register iSAC codec in ACM, effectively unregistering the PCM16B codec
+ // registered in AudioCodingModuleTest::SetUp();
+ ASSERT_EQ(0, acm_->RegisterReceiveCodec(codec_));
+ ASSERT_EQ(0, acm_->RegisterSendCodec(codec_));
+ }
+
+ void InsertPacket() {
+ int num_calls = packet_cb_.num_calls(); // Store locally for thread safety.
+ if (num_calls > last_packet_number_) {
+ // Get the new payload out from the callback handler.
+ // Note that since we swap buffers here instead of directly inserting
+ // a pointer to the data in |packet_cb_|, we avoid locking the callback
+ // for the duration of the IncomingPacket() call.
+ packet_cb_.SwapBuffers(&last_payload_vec_);
+ ASSERT_GT(last_payload_vec_.size(), 0u);
+ rtp_utility_->Forward(&rtp_header_);
+ last_packet_number_ = num_calls;
+ }
+ ASSERT_GT(last_payload_vec_.size(), 0u);
+ ASSERT_EQ(
+ 0,
+ acm_->IncomingPacket(
+ &last_payload_vec_[0], last_payload_vec_.size(), rtp_header_));
+ }
+
+ void InsertAudio() {
+ memcpy(input_frame_.data_, audio_loop_.GetNextBlock(), kNumSamples10ms);
+ AudioCodingModuleTest::InsertAudio();
+ }
+
+ void Encode() { ASSERT_GE(acm_->Process(), 0); }
+
+ // This method is the same as AudioCodingModuleMtTest::TestDone(), but here
+ // it is using the constants defined in this class (i.e., shorter test run).
+ virtual bool TestDone() {
+ if (packet_cb_.num_calls() > kNumPackets) {
+ CriticalSectionScoped lock(crit_sect_.get());
+ if (pull_audio_count_ > kNumPullCalls) {
+ // Both conditions for completion are met. End the test.
+ return true;
+ }
+ }
+ return false;
+ }
+
+ int last_packet_number_;
+ std::vector<uint8_t> last_payload_vec_;
+ test::AudioLoop audio_loop_;
+};
+
+TEST_F(AcmIsacMtTest, DoTest) {
+ EXPECT_EQ(kEventSignaled, RunTest());
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/call_statistics.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/call_statistics.cc
index 9153325afaf..4c3e9fc3939 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/call_statistics.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/call_statistics.cc
@@ -10,7 +10,7 @@
#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
-#include <cassert>
+#include <assert.h>
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc
index c2b218cb6cf..786fb2e5275 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.cc
@@ -219,6 +219,14 @@ void InitialDelayManager::LatePackets(
return;
}
+bool InitialDelayManager::GetPlayoutTimestamp(uint32_t* playout_timestamp) {
+ if (!buffering_) {
+ return false;
+ }
+ *playout_timestamp = playout_timestamp_;
+ return true;
+}
+
void InitialDelayManager::DisableBuffering() {
buffering_ = false;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h
index 3c5ba3c0139..6edc1150843 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h
@@ -65,8 +65,9 @@ class InitialDelayManager {
// sequence of late (or perhaps missing) packets is computed.
void LatePackets(uint32_t timestamp_now, SyncStream* sync_stream);
- // Playout timestamp, valid when buffering.
- uint32_t playout_timestamp() { return playout_timestamp_; }
+ // Get playout timestamp.
+ // Returns true if the timestamp is valid (when buffering), otherwise false.
+ bool GetPlayoutTimestamp(uint32_t* playout_timestamp);
// True if buffered audio is less than the given initial delay (specified at
// the constructor). Buffering might be disabled by the client of this class.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc
index 1e129f37e90..38b7cfc2714 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/initial_delay_manager_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include <cstring>
+#include <string.h>
#include "gtest/gtest.h"
#include "webrtc/modules/audio_coding/main/acm2/initial_delay_manager.h"
@@ -359,7 +359,9 @@ TEST_F(InitialDelayManagerTest, BufferingAudio) {
EXPECT_TRUE(manager_->buffering());
const uint32_t expected_playout_timestamp = rtp_info_.header.timestamp -
kInitDelayMs * kSamplingRateHz / 1000;
- EXPECT_EQ(expected_playout_timestamp, manager_->playout_timestamp());
+ uint32_t actual_playout_timestamp = 0;
+ EXPECT_TRUE(manager_->GetPlayoutTimestamp(&actual_playout_timestamp));
+ EXPECT_EQ(expected_playout_timestamp, actual_playout_timestamp);
NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc
index 8011d8856c0..5837c31a899 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/acm2/nack_unittest.cc
@@ -398,7 +398,7 @@ TEST(NackTest, ChangeOfListSizeAppliedAndOldElementsRemoved) {
// Packet lost more than NACK-list size limit.
uint16_t num_lost_packets = kNackThreshold + kNackListSize + 5;
- scoped_array<uint16_t> seq_num_lost(new uint16_t[num_lost_packets]);
+ scoped_ptr<uint16_t[]> seq_num_lost(new uint16_t[num_lost_packets]);
for (int n = 0; n < num_lost_packets; ++n) {
seq_num_lost[n] = ++seq_num;
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/interface/audio_coding_module.h b/chromium/third_party/webrtc/modules/audio_coding/main/interface/audio_coding_module.h
index db45addde22..cb0953aa400 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/main/interface/audio_coding_module.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/main/interface/audio_coding_module.h
@@ -15,7 +15,9 @@
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
#include "webrtc/modules/interface/module.h"
+#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -25,7 +27,6 @@ struct CodecInst;
struct WebRtcRTPHeader;
class AudioFrame;
class RTPFragmentationHeader;
-class Clock;
#define WEBRTC_10MS_PCM_AUDIO 960 // 16 bits super wideband 48 kHz
@@ -73,15 +74,22 @@ class ACMVQMonCallback {
const uint16_t delayMS) = 0; // average delay in ms
};
-// Version string for testing, to distinguish instances of ACM1 from ACM2.
-extern const char kLegacyAcmVersion[];
-extern const char kExperimentalAcmVersion[];
-
class AudioCodingModule: public Module {
protected:
AudioCodingModule() {}
public:
+ struct Config {
+ Config()
+ : id(0),
+ neteq_config(),
+ clock(Clock::GetRealTimeClock()) {}
+
+ int id;
+ NetEq::Config neteq_config;
+ Clock* clock;
+ };
+
///////////////////////////////////////////////////////////////////////////
// Creation and destruction of a ACM.
//
@@ -178,11 +186,6 @@ class AudioCodingModule: public Module {
//
static bool IsCodecValid(const CodecInst& codec);
- // Returns the version of ACM. This facilitates distinguishing instances of
- // ACM1 from ACM2 while testing. This API will be removed when ACM1 is
- // completely removed.
- virtual const char* Version() const = 0;
-
///////////////////////////////////////////////////////////////////////////
// Sender
//
@@ -370,12 +373,12 @@ class AudioCodingModule: public Module {
virtual int32_t Add10MsData(const AudioFrame& audio_frame) = 0;
///////////////////////////////////////////////////////////////////////////
- // (FEC) Forward Error Correction
+ // (RED) Redundant Coding
//
///////////////////////////////////////////////////////////////////////////
- // int32_t SetFECStatus(const bool enable)
- // configure FEC status i.e. on/off.
+ // int32_t SetREDStatus()
+ // configure RED status i.e. on/off.
//
// RFC 2198 describes a solution which has a single payload type which
// signifies a packet with redundancy. That packet then becomes a container,
@@ -385,27 +388,69 @@ class AudioCodingModule: public Module {
// since each encapsulated payload must be preceded by a header indicating
// the type of data enclosed.
//
- // This means that FEC is actually a RED scheme.
+ // Input:
+ // -enable_red : if true RED is enabled, otherwise RED is
+ // disabled.
+ //
+ // Return value:
+ // -1 if failed to set RED status,
+ // 0 if succeeded.
+ //
+ virtual int32_t SetREDStatus(bool enable_red) = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // bool REDStatus()
+ // Get RED status
+ //
+ // Return value:
+ // true if RED is enabled,
+ // false if RED is disabled.
+ //
+ virtual bool REDStatus() const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // (FEC) Forward Error Correction (codec internal)
+ //
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int32_t SetCodecFEC()
+ // Configures codec internal FEC status i.e. on/off. No effects on codecs that
+ // do not provide internal FEC.
//
// Input:
- // -enable_fec : if true FEC is enabled, otherwise FEC is
+ // -enable_fec : if true FEC will be enabled otherwise the FEC is
// disabled.
//
// Return value:
- // -1 if failed to set FEC status,
+ // -1 if failed, or the codec does not support FEC
// 0 if succeeded.
//
- virtual int32_t SetFECStatus(const bool enable_fec) = 0;
+ virtual int SetCodecFEC(bool enable_codec_fec) = 0;
///////////////////////////////////////////////////////////////////////////
- // bool FECStatus()
- // Get FEC status
+ // bool CodecFEC()
+ // Gets status of codec internal FEC.
//
- // Return value
+ // Return value:
// true if FEC is enabled,
// false if FEC is disabled.
//
- virtual bool FECStatus() const = 0;
+ virtual bool CodecFEC() const = 0;
+
+ ///////////////////////////////////////////////////////////////////////////
+ // int SetPacketLossRate()
+ // Sets expected packet loss rate for encoding. Some encoders provide packet
+ // loss gnostic encoding to make stream less sensitive to packet losses,
+ // through e.g., FEC. No effects on codecs that do not provide such encoding.
+ //
+ // Input:
+ // -packet_loss_rate : expected packet loss rate (0 -- 100 inclusive).
+ //
+ // Return value
+ // -1 if failed to set packet loss rate,
+ // 0 if succeeded.
+ //
+ virtual int SetPacketLossRate(int packet_loss_rate) = 0;
///////////////////////////////////////////////////////////////////////////
// (VAD) Voice Activity Detection
@@ -936,20 +981,6 @@ class AudioCodingModule: public Module {
AudioDecodingCallStats* call_stats) const = 0;
};
-struct AudioCodingModuleFactory {
- AudioCodingModuleFactory() {}
- virtual ~AudioCodingModuleFactory() {}
-
- virtual AudioCodingModule* Create(int id) const;
-};
-
-struct NewAudioCodingModuleFactory : AudioCodingModuleFactory {
- NewAudioCodingModuleFactory() {}
- virtual ~NewAudioCodingModuleFactory() {}
-
- virtual AudioCodingModule* Create(int id) const;
-};
-
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/Android.mk b/chromium/third_party/webrtc/modules/audio_coding/main/source/Android.mk
deleted file mode 100644
index 90214a9c408..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/Android.mk
+++ /dev/null
@@ -1,67 +0,0 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-include $(LOCAL_PATH)/../../../../../android-webrtc.mk
-
-LOCAL_ARM_MODE := arm
-LOCAL_MODULE_CLASS := STATIC_LIBRARIES
-LOCAL_MODULE := libwebrtc_audio_coding
-LOCAL_MODULE_TAGS := optional
-LOCAL_CPP_EXTENSION := .cc
-LOCAL_SRC_FILES := \
- acm_cng.cc \
- acm_codec_database.cc \
- acm_dtmf_detection.cc \
- acm_dtmf_playout.cc \
- acm_g722.cc \
- acm_generic_codec.cc \
- acm_ilbc.cc \
- acm_isac.cc \
- acm_neteq.cc \
- acm_pcm16b.cc \
- acm_pcma.cc \
- acm_pcmu.cc \
- acm_red.cc \
- acm_resampler.cc \
- audio_coding_module.cc \
- audio_coding_module_impl.cc
-
-# Flags passed to both C and C++ files.
-LOCAL_CFLAGS := \
- $(MY_WEBRTC_COMMON_DEFS)
-
-LOCAL_C_INCLUDES := \
- $(LOCAL_PATH)/../interface \
- $(LOCAL_PATH)/../../codecs/cng/include \
- $(LOCAL_PATH)/../../codecs/g711/include \
- $(LOCAL_PATH)/../../codecs/g722/include \
- $(LOCAL_PATH)/../../codecs/ilbc/interface \
- $(LOCAL_PATH)/../../codecs/iSAC/main/interface \
- $(LOCAL_PATH)/../../codecs/iSAC/fix/interface \
- $(LOCAL_PATH)/../../codecs/pcm16b/include \
- $(LOCAL_PATH)/../../neteq/interface \
- $(LOCAL_PATH)/../../../.. \
- $(LOCAL_PATH)/../../../interface \
- $(LOCAL_PATH)/../../../../common_audio/resampler/include \
- $(LOCAL_PATH)/../../../../common_audio/signal_processing/include \
- $(LOCAL_PATH)/../../../../common_audio/vad/include \
- $(LOCAL_PATH)/../../../../system_wrappers/interface
-
-LOCAL_SHARED_LIBRARIES := \
- libcutils \
- libdl \
- libstlport
-
-ifndef NDK_ROOT
-include external/stlport/libstlport.mk
-endif
-include $(BUILD_STATIC_LIBRARY)
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.cc
deleted file mode 100644
index d398607789b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.cc
+++ /dev/null
@@ -1,430 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_amr.h"
-
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_AMR
-// NOTE! GSM AMR is not included in the open-source package. The following
-// interface file is needed:
-//
-// /modules/audio_coding/codecs/amr/main/interface/amr_interface.h
-//
-// The API in the header file should match the one below.
-//
-// int16_t WebRtcAmr_CreateEnc(AMR_encinst_t_** enc_inst);
-// int16_t WebRtcAmr_CreateDec(AMR_decinst_t_** dec_inst);
-// int16_t WebRtcAmr_FreeEnc(AMR_encinst_t_* enc_inst);
-// int16_t WebRtcAmr_FreeDec(AMR_decinst_t_* dec_inst);
-// int16_t WebRtcAmr_Encode(AMR_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t*output,
-// int16_t mode);
-// int16_t WebRtcAmr_EncoderInit(AMR_encinst_t_* enc_inst,
-// int16_t dtx_mode);
-// int16_t WebRtcAmr_EncodeBitmode(AMR_encinst_t_* enc_inst,
-// int format);
-// int16_t WebRtcAmr_Decode(AMR_decinst_t_* dec_inst);
-// int16_t WebRtcAmr_DecodePlc(AMR_decinst_t_* dec_inst);
-// int16_t WebRtcAmr_DecoderInit(AMR_decinst_t_* dec_inst);
-// int16_t WebRtcAmr_DecodeBitmode(AMR_decinst_t_* dec_inst,
-// int format);
-#include "amr_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_AMR
-ACMAMR::ACMAMR(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- encoding_mode_(-1), // Invalid value.
- encoding_rate_(0), // Invalid value.
- encoder_packing_format_(AMRBandwidthEfficient),
- decoder_packing_format_(AMRBandwidthEfficient) {
- return;
-}
-
-ACMAMR::~ACMAMR() {
- return;
-}
-
-int16_t ACMAMR::InternalEncode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMAMR::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMAMR::EnableDTX() {
- return -1;
-}
-
-int16_t ACMAMR::DisableDTX() {
- return -1;
-}
-
-int16_t ACMAMR::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMAMR::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMAMR::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMAMR::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMAMR::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMAMR::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMAMR::DestructDecoderSafe() {
- return;
-}
-
-int16_t ACMAMR::SetBitRateSafe(const int32_t /* rate */) {
- return -1;
-}
-
-void ACMAMR::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMAMR::SetAMREncoderPackingFormat(
- ACMAMRPackingFormat /* packing_format */) {
- return -1;
-}
-
-ACMAMRPackingFormat ACMAMR::AMREncoderPackingFormat() const {
- return AMRUndefined;
-}
-
-int16_t ACMAMR::SetAMRDecoderPackingFormat(
- ACMAMRPackingFormat /* packing_format */) {
- return -1;
-}
-
-ACMAMRPackingFormat ACMAMR::AMRDecoderPackingFormat() const {
- return AMRUndefined;
-}
-
-#else //===================== Actual Implementation =======================
-
-#define WEBRTC_AMR_MR475 0
-#define WEBRTC_AMR_MR515 1
-#define WEBRTC_AMR_MR59 2
-#define WEBRTC_AMR_MR67 3
-#define WEBRTC_AMR_MR74 4
-#define WEBRTC_AMR_MR795 5
-#define WEBRTC_AMR_MR102 6
-#define WEBRTC_AMR_MR122 7
-
-ACMAMR::ACMAMR(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- encoding_mode_(-1), // invalid value
- encoding_rate_(0) { // invalid value
- codec_id_ = codec_id;
- has_internal_dtx_ = true;
- encoder_packing_format_ = AMRBandwidthEfficient;
- decoder_packing_format_ = AMRBandwidthEfficient;
- return;
-}
-
-ACMAMR::~ACMAMR() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcAmr_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcAmr_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMAMR::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- int16_t vad_decision = 1;
- // sanity check, if the rate is set correctly. we might skip this
- // sanity check. if rate is not set correctly, initialization flag
- // should be false and should not be here.
- if ((encoding_mode_ < WEBRTC_AMR_MR475) ||
- (encoding_mode_ > WEBRTC_AMR_MR122)) {
- *bitstream_len_byte = 0;
- return -1;
- }
- *bitstream_len_byte = WebRtcAmr_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- (int16_t*)bitstream,
- encoding_mode_);
-
- // Update VAD, if internal DTX is used
- if (has_internal_dtx_ && dtx_enabled_) {
- if (*bitstream_len_byte <= (7 * frame_len_smpl_ / 160)) {
- vad_decision = 0;
- }
- for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
- vad_label_[n] = vad_decision;
- }
- }
- // increment the read index
- in_audio_ix_read_ += frame_len_smpl_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMAMR::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMAMR::EnableDTX() {
- if (dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exist
- // enable DTX
- if (WebRtcAmr_EncoderInit(encoder_inst_ptr_, 1) < 0) {
- return -1;
- }
- dtx_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-}
-
-int16_t ACMAMR::DisableDTX() {
- if (!dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exist
- // disable DTX
- if (WebRtcAmr_EncoderInit(encoder_inst_ptr_, 0) < 0) {
- return -1;
- }
- dtx_enabled_ = false;
- return 0;
- } else {
- // encoder doesn't exists, therefore disabling is harmless
- return 0;
- }
-}
-
-int16_t ACMAMR::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- int16_t status = SetBitRateSafe((codec_params->codec_inst).rate);
- status += (WebRtcAmr_EncoderInit(
- encoder_inst_ptr_, ((codec_params->enable_dtx) ? 1 : 0)) < 0) ? -1 : 0;
- status += (WebRtcAmr_EncodeBitmode(
- encoder_inst_ptr_, encoder_packing_format_) < 0) ? -1 : 0;
- return (status < 0) ? -1 : 0;
-}
-
-int16_t ACMAMR::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- int16_t status =
- ((WebRtcAmr_DecoderInit(decoder_inst_ptr_) < 0) ? -1 : 0);
- status += WebRtcAmr_DecodeBitmode(decoder_inst_ptr_, decoder_packing_format_);
- return (status < 0) ? -1 : 0;
-}
-
-int32_t ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- // Todo:
- // log error
- return -1;
- }
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_AMR_FUNCTION."
- // Then call NetEQ to add the codec to it's
- // database.
- SET_CODEC_PAR((codec_def), kDecoderAMR, codec_inst.pltype, decoder_inst_ptr_,
- 8000);
- SET_AMR_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMAMR::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMAMR::InternalCreateEncoder() {
- return WebRtcAmr_CreateEnc(&encoder_inst_ptr_);
-}
-
-void ACMAMR::DestructEncoderSafe() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcAmr_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- // there is no encoder set the following
- encoder_exist_ = false;
- encoder_initialized_ = false;
- encoding_mode_ = -1; // invalid value
- encoding_rate_ = 0; // invalid value
-}
-
-int16_t ACMAMR::InternalCreateDecoder() {
- return WebRtcAmr_CreateDec(&decoder_inst_ptr_);
-}
-
-void ACMAMR::DestructDecoderSafe() {
- if (decoder_inst_ptr_ != NULL) {
- WebRtcAmr_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- // there is no encoder instance set the followings
- decoder_exist_ = false;
- decoder_initialized_ = false;
-}
-
-int16_t ACMAMR::SetBitRateSafe(const int32_t rate) {
- switch (rate) {
- case 4750: {
- encoding_mode_ = WEBRTC_AMR_MR475;
- encoding_rate_ = 4750;
- break;
- }
- case 5150: {
- encoding_mode_ = WEBRTC_AMR_MR515;
- encoding_rate_ = 5150;
- break;
- }
- case 5900: {
- encoding_mode_ = WEBRTC_AMR_MR59;
- encoding_rate_ = 5900;
- break;
- }
- case 6700: {
- encoding_mode_ = WEBRTC_AMR_MR67;
- encoding_rate_ = 6700;
- break;
- }
- case 7400: {
- encoding_mode_ = WEBRTC_AMR_MR74;
- encoding_rate_ = 7400;
- break;
- }
- case 7950: {
- encoding_mode_ = WEBRTC_AMR_MR795;
- encoding_rate_ = 7950;
- break;
- }
- case 10200: {
- encoding_mode_ = WEBRTC_AMR_MR102;
- encoding_rate_ = 10200;
- break;
- }
- case 12200: {
- encoding_mode_ = WEBRTC_AMR_MR122;
- encoding_rate_ = 12200;
- break;
- }
- default: {
- return -1;
- }
- }
- return 0;
-}
-
-void ACMAMR::InternalDestructEncoderInst(void* ptr_inst) {
- // Free the memory where ptr_inst is pointing to
- if (ptr_inst != NULL) {
- WebRtcAmr_FreeEnc(reinterpret_cast<AMR_encinst_t_*>(ptr_inst));
- }
- return;
-}
-
-int16_t ACMAMR::SetAMREncoderPackingFormat(
- ACMAMRPackingFormat packing_format) {
- if ((packing_format != AMRBandwidthEfficient) &&
- (packing_format != AMROctetAlligned) &&
- (packing_format != AMRFileStorage)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Invalid AMR Encoder packing-format.");
- return -1;
- } else {
- if (WebRtcAmr_EncodeBitmode(encoder_inst_ptr_, packing_format) < 0) {
- return -1;
- } else {
- encoder_packing_format_ = packing_format;
- return 0;
- }
- }
-}
-
-ACMAMRPackingFormat ACMAMR::AMREncoderPackingFormat() const {
- return encoder_packing_format_;
-}
-
-int16_t ACMAMR::SetAMRDecoderPackingFormat(
- ACMAMRPackingFormat packing_format) {
- if ((packing_format != AMRBandwidthEfficient) &&
- (packing_format != AMROctetAlligned) &&
- (packing_format != AMRFileStorage)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Invalid AMR decoder packing-format.");
- return -1;
- } else {
- if (WebRtcAmr_DecodeBitmode(decoder_inst_ptr_, packing_format) < 0) {
- return -1;
- } else {
- decoder_packing_format_ = packing_format;
- return 0;
- }
- }
-}
-
-ACMAMRPackingFormat ACMAMR::AMRDecoderPackingFormat() const {
- return decoder_packing_format_;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.h
deleted file mode 100644
index 19c657246a2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amr.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct AMR_encinst_t_;
-struct AMR_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMAMR : public ACMGenericCodec {
- public:
- explicit ACMAMR(int16_t codec_id);
- virtual ~ACMAMR();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- int16_t SetAMREncoderPackingFormat(const ACMAMRPackingFormat packing_format);
-
- ACMAMRPackingFormat AMREncoderPackingFormat() const;
-
- int16_t SetAMRDecoderPackingFormat(const ACMAMRPackingFormat packing_format);
-
- ACMAMRPackingFormat AMRDecoderPackingFormat() const;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t rate) OVERRIDE;
-
- virtual int16_t EnableDTX() OVERRIDE;
-
- virtual int16_t DisableDTX() OVERRIDE;
-
- AMR_encinst_t_* encoder_inst_ptr_;
- AMR_decinst_t_* decoder_inst_ptr_;
- int16_t encoding_mode_;
- int16_t encoding_rate_;
- ACMAMRPackingFormat encoder_packing_format_;
- ACMAMRPackingFormat decoder_packing_format_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.cc
deleted file mode 100644
index 8b1b58d03cf..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.cc
+++ /dev/null
@@ -1,436 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_amrwb.h"
-
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_AMRWB
-// NOTE! GSM AMR-wb is not included in the open-source package. The
-// following interface file is needed:
-//
-// /modules/audio_coding/codecs/amrwb/main/interface/amrwb_interface.h
-//
-// The API in the header file should match the one below.
-//
-// int16_t WebRtcAmrWb_CreateEnc(AMRWB_encinst_t_** enc_inst);
-// int16_t WebRtcAmrWb_CreateDec(AMRWB_decinst_t_** dec_inst);
-// int16_t WebRtcAmrWb_FreeEnc(AMRWB_encinst_t_* enc_inst);
-// int16_t WebRtcAmrWb_FreeDec(AMRWB_decinst_t_* dec_inst);
-// int16_t WebRtcAmrWb_Encode(AMRWB_encinst_t_* enc_inst, int16_t* input,
-// int16_t len, int16_t* output, int16_t mode);
-// int16_t WebRtcAmrWb_EncoderInit(AMRWB_encinst_t_* enc_inst,
-// int16_t dtx_mode);
-// int16_t WebRtcAmrWb_EncodeBitmode(AMRWB_encinst_t_* enc_inst,
-// int format);
-// int16_t WebRtcAmrWb_Decode(AMRWB_decinst_t_* dec_inst);
-// int16_t WebRtcAmrWb_DecodePlc(AMRWB_decinst_t_* dec_inst);
-// int16_t WebRtcAmrWb_DecoderInit(AMRWB_decinst_t_* dec_inst);
-// int16_t WebRtcAmrWb_DecodeBitmode(AMRWB_decinst_t_* dec_inst,
-// int format);
-#include "amrwb_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_AMRWB
-ACMAMRwb::ACMAMRwb(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- encoding_mode_(-1), // invalid value
- encoding_rate_(0), // invalid value
- encoder_packing_format_(AMRBandwidthEfficient),
- decoder_packing_format_(AMRBandwidthEfficient) {
-}
-
-ACMAMRwb::~ACMAMRwb() {
-}
-
-int16_t ACMAMRwb::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMAMRwb::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMAMRwb::EnableDTX() {
- return -1;
-}
-
-int16_t ACMAMRwb::DisableDTX() {
- return -1;
-}
-
-int16_t ACMAMRwb::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMAMRwb::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec*
-ACMAMRwb::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMAMRwb::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMAMRwb::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMAMRwb::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMAMRwb::DestructDecoderSafe() {
- return;
-}
-
-int16_t ACMAMRwb::SetBitRateSafe(const int32_t /* rate */) {
- return -1;
-}
-
-void ACMAMRwb::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMAMRwb::SetAMRwbEncoderPackingFormat(
- ACMAMRPackingFormat /* packing_format */) {
- return -1;
-}
-
-ACMAMRPackingFormat ACMAMRwb::AMRwbEncoderPackingFormat() const {
- return AMRUndefined;
-}
-
-int16_t ACMAMRwb::SetAMRwbDecoderPackingFormat(
- ACMAMRPackingFormat /* packing_format */) {
- return -1;
-}
-
-ACMAMRPackingFormat ACMAMRwb::AMRwbDecoderPackingFormat() const {
- return AMRUndefined;
-}
-
-#else //===================== Actual Implementation =======================
-
-#define AMRWB_MODE_7k 0
-#define AMRWB_MODE_9k 1
-#define AMRWB_MODE_12k 2
-#define AMRWB_MODE_14k 3
-#define AMRWB_MODE_16k 4
-#define AMRWB_MODE_18k 5
-#define AMRWB_MODE_20k 6
-#define AMRWB_MODE_23k 7
-#define AMRWB_MODE_24k 8
-
-ACMAMRwb::ACMAMRwb(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- encoding_mode_(-1), // invalid value
- encoding_rate_(0) { // invalid value
- codec_id_ = codec_id;
- has_internal_dtx_ = true;
- encoder_packing_format_ = AMRBandwidthEfficient;
- decoder_packing_format_ = AMRBandwidthEfficient;
- return;
-}
-
-ACMAMRwb::~ACMAMRwb() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcAmrWb_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcAmrWb_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMAMRwb::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- int16_t vad_decision = 1;
- // sanity check, if the rate is set correctly. we might skip this
- // sanity check. if rate is not set correctly, initialization flag
- // should be false and should not be here.
- if ((encoding_mode_ < AMRWB_MODE_7k) || (encoding_mode_ > AMRWB_MODE_24k)) {
- *bitstream_len_byte = 0;
- return -1;
- }
- *bitstream_len_byte = WebRtcAmrWb_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- (int16_t*)bitstream,
- encoding_mode_);
-
- // Update VAD, if internal DTX is used
- if (has_internal_dtx_ && dtx_enabled_) {
- if (*bitstream_len_byte <= (7 * frame_len_smpl_ / 160)) {
- vad_decision = 0;
- }
- for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
- vad_label_[n] = vad_decision;
- }
- }
- // increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer
- in_audio_ix_read_ += frame_len_smpl_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMAMRwb::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMAMRwb::EnableDTX() {
- if (dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exist
- // enable DTX
- if (WebRtcAmrWb_EncoderInit(encoder_inst_ptr_, 1) < 0) {
- return -1;
- }
- dtx_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-}
-
-int16_t ACMAMRwb::DisableDTX() {
- if (!dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exist
- // disable DTX
- if (WebRtcAmrWb_EncoderInit(encoder_inst_ptr_, 0) < 0) {
- return -1;
- }
- dtx_enabled_ = false;
- return 0;
- } else {
- // encoder doesn't exists, therefore disabling is harmless
- return 0;
- }
-}
-
-int16_t ACMAMRwb::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
- // sanity check
- if (encoder_inst_ptr_ == NULL) {
- return -1;
- }
-
- int16_t status = SetBitRateSafe((codec_params->codec_inst).rate);
- status += (WebRtcAmrWb_EncoderInit(
- encoder_inst_ptr_, ((codec_params->enable_dtx) ? 1 : 0)) < 0) ? -1 : 0;
- status += (WebRtcAmrWb_EncodeBitmode(
- encoder_inst_ptr_, encoder_packing_format_) < 0) ? -1 : 0;
- return (status < 0) ? -1 : 0;
-}
-
-int16_t ACMAMRwb::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- int16_t status = WebRtcAmrWb_DecodeBitmode(decoder_inst_ptr_,
- decoder_packing_format_);
- status += ((WebRtcAmrWb_DecoderInit(decoder_inst_ptr_) < 0) ? -1 : 0);
- return (status < 0) ? -1 : 0;
-}
-
-int32_t ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- return -1;
- }
-
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_AMRWB_FUNCTION."
- // Then call NetEQ to add the codec to it's
- // database.
- SET_CODEC_PAR((codec_def), kDecoderAMRWB, codec_inst.pltype,
- decoder_inst_ptr_, 16000);
- SET_AMRWB_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMAMRwb::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMAMRwb::InternalCreateEncoder() {
- return WebRtcAmrWb_CreateEnc(&encoder_inst_ptr_);
-}
-
-void ACMAMRwb::DestructEncoderSafe() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcAmrWb_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- // there is no encoder set the following
- encoder_exist_ = false;
- encoder_initialized_ = false;
- encoding_mode_ = -1; // invalid value
- encoding_rate_ = 0;
-}
-
-int16_t ACMAMRwb::InternalCreateDecoder() {
- return WebRtcAmrWb_CreateDec(&decoder_inst_ptr_);
-}
-
-void ACMAMRwb::DestructDecoderSafe() {
- if (decoder_inst_ptr_ != NULL) {
- WebRtcAmrWb_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- // there is no encoder instance set the followings
- decoder_exist_ = false;
- decoder_initialized_ = false;
-}
-
-int16_t ACMAMRwb::SetBitRateSafe(const int32_t rate) {
- switch (rate) {
- case 7000: {
- encoding_mode_ = AMRWB_MODE_7k;
- encoding_rate_ = 7000;
- break;
- }
- case 9000: {
- encoding_mode_ = AMRWB_MODE_9k;
- encoding_rate_ = 9000;
- break;
- }
- case 12000: {
- encoding_mode_ = AMRWB_MODE_12k;
- encoding_rate_ = 12000;
- break;
- }
- case 14000: {
- encoding_mode_ = AMRWB_MODE_14k;
- encoding_rate_ = 14000;
- break;
- }
- case 16000: {
- encoding_mode_ = AMRWB_MODE_16k;
- encoding_rate_ = 16000;
- break;
- }
- case 18000: {
- encoding_mode_ = AMRWB_MODE_18k;
- encoding_rate_ = 18000;
- break;
- }
- case 20000: {
- encoding_mode_ = AMRWB_MODE_20k;
- encoding_rate_ = 20000;
- break;
- }
- case 23000: {
- encoding_mode_ = AMRWB_MODE_23k;
- encoding_rate_ = 23000;
- break;
- }
- case 24000: {
- encoding_mode_ = AMRWB_MODE_24k;
- encoding_rate_ = 24000;
- break;
- }
- default: {
- return -1;
- }
- }
- return 0;
-}
-
-void ACMAMRwb::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcAmrWb_FreeEnc(static_cast<AMRWB_encinst_t_*>(ptr_inst));
- }
- return;
-}
-
-int16_t ACMAMRwb::SetAMRwbEncoderPackingFormat(
- ACMAMRPackingFormat packing_format) {
- if ((packing_format != AMRBandwidthEfficient) &&
- (packing_format != AMROctetAlligned) &&
- (packing_format != AMRFileStorage)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Invalid AMRwb encoder packing-format.");
- return -1;
- } else {
- if (WebRtcAmrWb_EncodeBitmode(encoder_inst_ptr_, packing_format) < 0) {
- return -1;
- } else {
- encoder_packing_format_ = packing_format;
- return 0;
- }
- }
-}
-
-ACMAMRPackingFormat ACMAMRwb::AMRwbEncoderPackingFormat() const {
- return encoder_packing_format_;
-}
-
-int16_t ACMAMRwb::SetAMRwbDecoderPackingFormat(
- ACMAMRPackingFormat packing_format) {
- if ((packing_format != AMRBandwidthEfficient) &&
- (packing_format != AMROctetAlligned) &&
- (packing_format != AMRFileStorage)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Invalid AMRwb decoder packing-format.");
- return -1;
- } else {
- if (WebRtcAmrWb_DecodeBitmode(decoder_inst_ptr_, packing_format) < 0) {
- return -1;
- } else {
- decoder_packing_format_ = packing_format;
- return 0;
- }
- }
-}
-
-ACMAMRPackingFormat ACMAMRwb::AMRwbDecoderPackingFormat() const {
- return decoder_packing_format_;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.h
deleted file mode 100644
index 25934187e55..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_amrwb.h
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct AMRWB_encinst_t_;
-struct AMRWB_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMAMRwb : public ACMGenericCodec {
- public:
- explicit ACMAMRwb(int16_t codec_id);
- virtual ~ACMAMRwb();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t SetAMRwbEncoderPackingFormat(
- const ACMAMRPackingFormat packing_format);
-
- virtual ACMAMRPackingFormat AMRwbEncoderPackingFormat() const;
-
- virtual int16_t SetAMRwbDecoderPackingFormat(
- const ACMAMRPackingFormat packing_format);
-
- virtual ACMAMRPackingFormat AMRwbDecoderPackingFormat() const;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t rate) OVERRIDE;
-
- virtual int16_t EnableDTX() OVERRIDE;
-
- virtual int16_t DisableDTX() OVERRIDE;
-
- AMRWB_encinst_t_* encoder_inst_ptr_;
- AMRWB_decinst_t_* decoder_inst_ptr_;
-
- int16_t encoding_mode_;
- int16_t encoding_rate_;
- ACMAMRPackingFormat encoder_packing_format_;
- ACMAMRPackingFormat decoder_packing_format_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.cc
deleted file mode 100644
index 3b838143d4a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.cc
+++ /dev/null
@@ -1,339 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_celt.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_CELT
-// NOTE! Celt is not included in the open-source package. Modify this file or
-// your codec API to match the function call and name of used Celt API file.
-#include "celt_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_CELT
-
-ACMCELT::ACMCELT(int16_t /* codec_id */)
- : enc_inst_ptr_(NULL),
- dec_inst_ptr_(NULL),
- sampling_freq_(0),
- bitrate_(0),
- channels_(1),
- dec_channels_(1) {
- return;
-}
-
-ACMCELT::~ACMCELT() {
- return;
-}
-
-int16_t ACMCELT::InternalEncode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMCELT::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMCELT::InternalInitEncoder(WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMCELT::InternalInitDecoder(WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMCELT::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMCELT::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMCELT::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMCELT::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMCELT::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMCELT::DestructDecoderSafe() {
- return;
-}
-
-void ACMCELT::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-bool ACMCELT::IsTrueStereoCodec() {
- return true;
-}
-
-int16_t ACMCELT::SetBitRateSafe(const int32_t /*rate*/) {
- return -1;
-}
-
-void ACMCELT::SplitStereoPacket(uint8_t* /*payload*/,
- int32_t* /*payload_length*/) {}
-
-#else //===================== Actual Implementation =======================
-
-ACMCELT::ACMCELT(int16_t codec_id)
- : enc_inst_ptr_(NULL),
- dec_inst_ptr_(NULL),
- sampling_freq_(32000), // Default sampling frequency.
- bitrate_(64000), // Default rate.
- channels_(1), // Default send mono.
- dec_channels_(1) { // Default receive mono.
- // TODO(tlegrand): remove later when ACMGenericCodec has a new constructor.
- codec_id_ = codec_id;
-
- return;
-}
-
-ACMCELT::~ACMCELT() {
- if (enc_inst_ptr_ != NULL) {
- WebRtcCelt_FreeEnc(enc_inst_ptr_);
- enc_inst_ptr_ = NULL;
- }
- if (dec_inst_ptr_ != NULL) {
- WebRtcCelt_FreeDec(dec_inst_ptr_);
- dec_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMCELT::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = 0;
-
- // Call Encoder.
- *bitstream_len_byte = WebRtcCelt_Encode(enc_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- bitstream);
-
- // Increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_ * channels_;
-
- if (*bitstream_len_byte < 0) {
- // Error reported from the encoder.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: Encode error for Celt");
- *bitstream_len_byte = 0;
- return -1;
- }
-
- return *bitstream_len_byte;
-}
-
-int16_t ACMCELT::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMCELT::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- // Set bitrate and check that it is within the valid range.
- int16_t status = SetBitRateSafe((codec_params->codec_inst).rate);
- if (status < 0) {
- return -1;
- }
-
- // If number of channels changed we need to re-create memory.
- if (codec_params->codec_inst.channels != channels_) {
- WebRtcCelt_FreeEnc(enc_inst_ptr_);
- enc_inst_ptr_ = NULL;
- // Store new number of channels.
- channels_ = codec_params->codec_inst.channels;
- if (WebRtcCelt_CreateEnc(&enc_inst_ptr_, channels_) < 0) {
- return -1;
- }
- }
-
- // Initiate encoder.
- if (WebRtcCelt_EncoderInit(enc_inst_ptr_, channels_, bitrate_) >= 0) {
- return 0;
- } else {
- return -1;
- }
-}
-
-int16_t ACMCELT::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
- // If number of channels changed we need to re-create memory.
- if (codec_params->codec_inst.channels != dec_channels_) {
- WebRtcCelt_FreeDec(dec_inst_ptr_);
- dec_inst_ptr_ = NULL;
- // Store new number of channels.
- dec_channels_ = codec_params->codec_inst.channels;
- if (WebRtcCelt_CreateDec(&dec_inst_ptr_, dec_channels_) < 0) {
- return -1;
- }
- }
-
- // Initiate decoder, both master and slave parts.
- if (WebRtcCelt_DecoderInit(dec_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: init decoder failed for Celt.");
- return -1;
- }
- if (WebRtcCelt_DecoderInitSlave(dec_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: init decoder failed for Celt.");
- return -1;
- }
- return 0;
-}
-
-int32_t ACMCELT::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CodecDef: Decoder uninitialized for Celt");
- return -1;
- }
-
- // Fill up the structure by calling
- // "SET_CODEC_PAR" and "SET_CELT_FUNCTIONS" or "SET_CELTSLAVE_FUNCTIONS".
- // Then call NetEQ to add the codec to it's
- // database.
- if (codec_inst.channels == 1) {
- SET_CODEC_PAR(codec_def, kDecoderCELT_32, codec_inst.pltype, dec_inst_ptr_,
- 32000);
- } else {
- SET_CODEC_PAR(codec_def, kDecoderCELT_32_2ch, codec_inst.pltype,
- dec_inst_ptr_, 32000);
- }
-
- // If this is the master of NetEQ, regular decoder will be added, otherwise
- // the slave decoder will be used.
- if (is_master_) {
- SET_CELT_FUNCTIONS(codec_def);
- } else {
- SET_CELTSLAVE_FUNCTIONS(codec_def);
- }
- return 0;
-}
-
-ACMGenericCodec* ACMCELT::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMCELT::InternalCreateEncoder() {
- if (WebRtcCelt_CreateEnc(&enc_inst_ptr_, num_channels_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: create encoder failed for Celt");
- return -1;
- }
- channels_ = num_channels_;
- return 0;
-}
-
-void ACMCELT::DestructEncoderSafe() {
- encoder_exist_ = false;
- encoder_initialized_ = false;
- if (enc_inst_ptr_ != NULL) {
- WebRtcCelt_FreeEnc(enc_inst_ptr_);
- enc_inst_ptr_ = NULL;
- }
-}
-
-int16_t ACMCELT::InternalCreateDecoder() {
- if (WebRtcCelt_CreateDec(&dec_inst_ptr_, dec_channels_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateDecoder: create decoder failed for Celt");
- return -1;
- }
-
- return 0;
-}
-
-void ACMCELT::DestructDecoderSafe() {
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (dec_inst_ptr_ != NULL) {
- WebRtcCelt_FreeDec(dec_inst_ptr_);
- dec_inst_ptr_ = NULL;
- }
-}
-
-void ACMCELT::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcCelt_FreeEnc(static_cast<CELT_encinst_t*>(ptr_inst));
- }
- return;
-}
-
-bool ACMCELT::IsTrueStereoCodec() {
- return true;
-}
-
-int16_t ACMCELT::SetBitRateSafe(const int32_t rate) {
- // Check that rate is in the valid range.
- if ((rate >= 48000) && (rate <= 128000)) {
- // Store new rate.
- bitrate_ = rate;
-
- // Initiate encoder with new rate.
- if (WebRtcCelt_EncoderInit(enc_inst_ptr_, channels_, bitrate_) >= 0) {
- return 0;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: Failed to initiate Celt with rate %d",
- rate);
- return -1;
- }
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: Invalid rate Celt, %d", rate);
- return -1;
- }
-}
-
-// Copy the stereo packet so that NetEq will insert into both master and slave.
-void ACMCELT::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
- // Duplicate the payload.
- memcpy(&payload[*payload_length], &payload[0],
- sizeof(uint8_t) * (*payload_length));
- // Double the size of the packet.
- *payload_length *= 2;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.h
deleted file mode 100644
index 4a4610e0d47..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_celt.h
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct CELT_encinst_t_;
-struct CELT_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMCELT : public ACMGenericCodec {
- public:
- explicit ACMCELT(int16_t codec_id);
- virtual ~ACMCELT();
-
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual bool IsTrueStereoCodec() OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t rate) OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-
- CELT_encinst_t_* enc_inst_ptr_;
- CELT_decinst_t_* dec_inst_ptr_;
- uint16_t sampling_freq_;
- int32_t bitrate_;
- uint16_t channels_;
- uint16_t dec_channels_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.cc
deleted file mode 100644
index 6f3a5057e0b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.cc
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_cng.h"
-
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMCNG::ACMCNG(int16_t codec_id) {
- encoder_inst_ptr_ = NULL;
- decoder_inst_ptr_ = NULL;
- codec_id_ = codec_id;
- samp_freq_hz_ = ACMCodecDB::CodecFreq(codec_id_);
- return;
-}
-
-ACMCNG::~ACMCNG() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcCng_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcCng_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-// CNG is not like a regular encoder, this function
-// should not be called normally
-// instead the following function is called from inside
-// ACMGenericCodec::ProcessFrameVADDTX
-int16_t ACMCNG::InternalEncode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMCNG::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-// CNG is not like a regular encoder,
-// this function should not be called normally
-// instead the following function is called from inside
-// ACMGenericCodec::ProcessFrameVADDTX
-int16_t ACMCNG::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMCNG::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return WebRtcCng_InitDec(decoder_inst_ptr_);
-}
-
-int32_t ACMCNG::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- // TODO(tlegrand): log error
- return -1;
- }
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_CNG_FUNCTION."
- // Then return the structure back to NetEQ to add the codec to it's
- // database.
-
- if (samp_freq_hz_ == 8000 || samp_freq_hz_ == 16000 ||
- samp_freq_hz_ == 32000 || samp_freq_hz_ == 48000) {
- SET_CODEC_PAR((codec_def), kDecoderCNG, codec_inst.pltype,
- decoder_inst_ptr_, samp_freq_hz_);
- SET_CNG_FUNCTIONS((codec_def));
- return 0;
- } else {
- return -1;
- }
-}
-
-ACMGenericCodec* ACMCNG::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMCNG::InternalCreateEncoder() {
- if (WebRtcCng_CreateEnc(&encoder_inst_ptr_) < 0) {
- encoder_inst_ptr_ = NULL;
- return -1;
- } else {
- return 0;
- }
-}
-
-void ACMCNG::DestructEncoderSafe() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcCng_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- encoder_exist_ = false;
- encoder_initialized_ = false;
-}
-
-int16_t ACMCNG::InternalCreateDecoder() {
- if (WebRtcCng_CreateDec(&decoder_inst_ptr_) < 0) {
- decoder_inst_ptr_ = NULL;
- return -1;
- } else {
- return 0;
- }
-}
-
-void ACMCNG::DestructDecoderSafe() {
- if (decoder_inst_ptr_ != NULL) {
- WebRtcCng_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- decoder_exist_ = false;
- decoder_initialized_ = false;
-}
-
-void ACMCNG::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcCng_FreeEnc(static_cast<CNG_enc_inst*>(ptr_inst));
- }
- return;
-}
-
-int16_t ACMCNG::EnableDTX() { return -1; }
-int16_t ACMCNG::DisableDTX() { return -1; }
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.h
deleted file mode 100644
index 728312d55fb..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_cng.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct WebRtcCngEncInst;
-struct WebRtcCngDecInst;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMCNG: public ACMGenericCodec {
- public:
- explicit ACMCNG(int16_t codec_id);
- virtual ~ACMCNG();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual int16_t EnableDTX() OVERRIDE;
- virtual int16_t DisableDTX() OVERRIDE;
-
- WebRtcCngEncInst* encoder_inst_ptr_;
- WebRtcCngDecInst* decoder_inst_ptr_;
- uint16_t samp_freq_hz_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.cc
deleted file mode 100644
index 138effd6a9f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.cc
+++ /dev/null
@@ -1,956 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file generates databases with information about all supported audio
- * codecs.
- */
-
-// TODO(tlegrand): Change constant input pointers in all functions to constant
-// references, where appropriate.
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-// Includes needed to create the codecs.
-// G.711, PCM mu-law and A-law.
-#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_pcma.h"
-#include "webrtc/modules/audio_coding/main/source/acm_pcmu.h"
-// CNG.
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/main/source/acm_cng.h"
-// NetEQ.
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#ifdef WEBRTC_CODEC_ISAC
-#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
-#endif
-#ifdef WEBRTC_CODEC_ISACFX
-#include "webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h"
-#endif
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
-#include "webrtc/modules/audio_coding/main/source/acm_isac.h"
-#include "webrtc/modules/audio_coding/main/source/acm_isac_macros.h"
-#endif
-#ifdef WEBRTC_CODEC_PCM16
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#include "webrtc/modules/audio_coding/main/source/acm_pcm16b.h"
-#endif
-#ifdef WEBRTC_CODEC_ILBC
-#include "webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h"
-#include "webrtc/modules/audio_coding/main/source/acm_ilbc.h"
-#endif
-#ifdef WEBRTC_CODEC_AMR
-#include "webrtc/modules/audio_coding/codecs/amr/include/amr_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_amr.h"
-#endif
-#ifdef WEBRTC_CODEC_AMRWB
-#include "webrtc/modules/audio_coding/codecs/amrwb/include/amrwb_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_amrwb.h"
-#endif
-#ifdef WEBRTC_CODEC_CELT
-#include "webrtc/modules/audio_coding/codecs/celt/include/celt_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_celt.h"
-#endif
-#ifdef WEBRTC_CODEC_G722
-#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_g722.h"
-#endif
-#ifdef WEBRTC_CODEC_G722_1
-#include "webrtc/modules/audio_coding/codecs/g7221/include/g7221_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_g7221.h"
-#endif
-#ifdef WEBRTC_CODEC_G722_1C
-#include "webrtc/modules/audio_coding/codecs/g7221c/include/g7221c_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_g7221c.h"
-#endif
-#ifdef WEBRTC_CODEC_G729
-#include "webrtc/modules/audio_coding/codecs/g729/include/g729_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_g729.h"
-#endif
-#ifdef WEBRTC_CODEC_G729_1
-#include "webrtc/modules/audio_coding/codecs/g7291/include/g7291_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_g7291.h"
-#endif
-#ifdef WEBRTC_CODEC_GSMFR
-#include "webrtc/modules/audio_coding/codecs/gsmfr/include/gsmfr_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_gsmfr.h"
-#endif
-#ifdef WEBRTC_CODEC_OPUS
-#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_opus.h"
-#endif
-#ifdef WEBRTC_CODEC_SPEEX
-#include "webrtc/modules/audio_coding/codecs/speex/include/speex_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_speex.h"
-#endif
-#ifdef WEBRTC_CODEC_AVT
-#include "webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h"
-#endif
-#ifdef WEBRTC_CODEC_RED
-#include "webrtc/modules/audio_coding/main/source/acm_red.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-// Not yet used payload-types.
-// 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68,
-// 67, 66, 65
-
-const CodecInst ACMCodecDB::database_[] = {
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- {103, "ISAC", 16000, kIsacPacSize480, 1, kIsacWbDefaultRate},
-# if (defined(WEBRTC_CODEC_ISAC))
- {104, "ISAC", 32000, kIsacPacSize960, 1, kIsacSwbDefaultRate},
- {105, "ISAC", 48000, kIsacPacSize1440, 1, kIsacSwbDefaultRate},
-# endif
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- // Mono
- {107, "L16", 8000, 80, 1, 128000},
- {108, "L16", 16000, 160, 1, 256000},
- {109, "L16", 32000, 320, 1, 512000},
- // Stereo
- {111, "L16", 8000, 80, 2, 128000},
- {112, "L16", 16000, 160, 2, 256000},
- {113, "L16", 32000, 320, 2, 512000},
-#endif
- // G.711, PCM mu-law and A-law.
- // Mono
- {0, "PCMU", 8000, 160, 1, 64000},
- {8, "PCMA", 8000, 160, 1, 64000},
- // Stereo
- {110, "PCMU", 8000, 160, 2, 64000},
- {118, "PCMA", 8000, 160, 2, 64000},
-#ifdef WEBRTC_CODEC_ILBC
- {102, "ILBC", 8000, 240, 1, 13300},
-#endif
-#ifdef WEBRTC_CODEC_AMR
- {114, "AMR", 8000, 160, 1, 12200},
-#endif
-#ifdef WEBRTC_CODEC_AMRWB
- {115, "AMR-WB", 16000, 320, 1, 20000},
-#endif
-#ifdef WEBRTC_CODEC_CELT
- // Mono
- {116, "CELT", 32000, 640, 1, 64000},
- // Stereo
- {117, "CELT", 32000, 640, 2, 64000},
-#endif
-#ifdef WEBRTC_CODEC_G722
- // Mono
- {9, "G722", 16000, 320, 1, 64000},
- // Stereo
- {119, "G722", 16000, 320, 2, 64000},
-#endif
-#ifdef WEBRTC_CODEC_G722_1
- {92, "G7221", 16000, 320, 1, 32000},
- {91, "G7221", 16000, 320, 1, 24000},
- {90, "G7221", 16000, 320, 1, 16000},
-#endif
-#ifdef WEBRTC_CODEC_G722_1C
- {89, "G7221", 32000, 640, 1, 48000},
- {88, "G7221", 32000, 640, 1, 32000},
- {87, "G7221", 32000, 640, 1, 24000},
-#endif
-#ifdef WEBRTC_CODEC_G729
- {18, "G729", 8000, 240, 1, 8000},
-#endif
-#ifdef WEBRTC_CODEC_G729_1
- {86, "G7291", 16000, 320, 1, 32000},
-#endif
-#ifdef WEBRTC_CODEC_GSMFR
- {3, "GSM", 8000, 160, 1, 13200},
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- // Opus internally supports 48, 24, 16, 12, 8 kHz.
- // Mono and stereo.
- {120, "opus", 48000, 960, 2, 64000},
-#endif
-#ifdef WEBRTC_CODEC_SPEEX
- {85, "speex", 8000, 160, 1, 11000},
- {84, "speex", 16000, 320, 1, 22000},
-#endif
- // Comfort noise for four different sampling frequencies.
- {13, "CN", 8000, 240, 1, 0},
- {98, "CN", 16000, 480, 1, 0},
- {99, "CN", 32000, 960, 1, 0},
-#ifdef ENABLE_48000_HZ
- {100, "CN", 48000, 1440, 1, 0},
-#endif
-#ifdef WEBRTC_CODEC_AVT
- {106, "telephone-event", 8000, 240, 1, 0},
-#endif
-#ifdef WEBRTC_CODEC_RED
- {127, "red", 8000, 0, 1, 0},
-#endif
- // To prevent compile errors due to trailing commas.
- {-1, "Null", -1, -1, -1, -1}
-};
-
-// Create database with all codec settings at compile time.
-// Each entry needs the following parameters in the given order:
-// Number of allowed packet sizes, a vector with the allowed packet sizes,
-// Basic block samples, max number of channels that are supported.
-const ACMCodecDB::CodecSettings ACMCodecDB::codec_settings_[] = {
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- {2, {kIsacPacSize480, kIsacPacSize960}, 0, 1},
-# if (defined(WEBRTC_CODEC_ISAC))
- {1, {kIsacPacSize960}, 0, 1},
- {1, {kIsacPacSize1440}, 0, 1},
-# endif
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- // Mono
- {4, {80, 160, 240, 320}, 0, 2},
- {4, {160, 320, 480, 640}, 0, 2},
- {2, {320, 640}, 0, 2},
- // Stereo
- {4, {80, 160, 240, 320}, 0, 2},
- {4, {160, 320, 480, 640}, 0, 2},
- {2, {320, 640}, 0, 2},
-#endif
- // G.711, PCM mu-law and A-law.
- // Mono
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
- // Stereo
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
- {6, {80, 160, 240, 320, 400, 480}, 0, 2},
-#ifdef WEBRTC_CODEC_ILBC
- {4, {160, 240, 320, 480}, 0, 1},
-#endif
-#ifdef WEBRTC_CODEC_AMR
- {3, {160, 320, 480}, 0, 1},
-#endif
-#ifdef WEBRTC_CODEC_AMRWB
- {3, {320, 640, 960}, 0, 1},
-#endif
-#ifdef WEBRTC_CODEC_CELT
- // Mono
- {1, {640}, 0, 2},
- // Stereo
- {1, {640}, 0, 2},
-#endif
-#ifdef WEBRTC_CODEC_G722
- // Mono
- {6, {160, 320, 480, 640, 800, 960}, 0, 2},
- // Stereo
- {6, {160, 320, 480, 640, 800, 960}, 0, 2},
-#endif
-#ifdef WEBRTC_CODEC_G722_1
- {1, {320}, 320, 1},
- {1, {320}, 320, 1},
- {1, {320}, 320, 1},
-#endif
-#ifdef WEBRTC_CODEC_G722_1C
- {1, {640}, 640, 1},
- {1, {640}, 640, 1},
- {1, {640}, 640, 1},
-#endif
-#ifdef WEBRTC_CODEC_G729
- {6, {80, 160, 240, 320, 400, 480}, 0, 1},
-#endif
-#ifdef WEBRTC_CODEC_G729_1
- {3, {320, 640, 960}, 0, 1},
-#endif
-#ifdef WEBRTC_CODEC_GSMFR
- {3, {160, 320, 480}, 160, 1},
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- // Opus supports frames shorter than 10ms,
- // but it doesn't help us to use them.
- // Mono and stereo.
- {4, {480, 960, 1920, 2880}, 0, 2},
-#endif
-#ifdef WEBRTC_CODEC_SPEEX
- {3, {160, 320, 480}, 0, 1},
- {3, {320, 640, 960}, 0, 1},
-#endif
- // Comfort noise for three different sampling frequencies.
- {1, {240}, 240, 1},
- {1, {480}, 480, 1},
- {1, {960}, 960, 1},
-#ifdef ENABLE_48000_HZ
- {1, {1440}, 1440, 1},
-#endif
-#ifdef WEBRTC_CODEC_AVT
- {1, {240}, 240, 1},
-#endif
-#ifdef WEBRTC_CODEC_RED
- {1, {0}, 0, 1},
-#endif
- // To prevent compile errors due to trailing commas.
- {-1, {-1}, -1, -1}
-};
-
-// Create a database of all NetEQ decoders at compile time.
-const WebRtcNetEQDecoder ACMCodecDB::neteq_decoders_[] = {
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- kDecoderISAC,
-# if (defined(WEBRTC_CODEC_ISAC))
- kDecoderISACswb,
- kDecoderISACfb,
-# endif
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- // Mono
- kDecoderPCM16B,
- kDecoderPCM16Bwb,
- kDecoderPCM16Bswb32kHz,
- // Stereo
- kDecoderPCM16B_2ch,
- kDecoderPCM16Bwb_2ch,
- kDecoderPCM16Bswb32kHz_2ch,
-#endif
- // G.711, PCM mu-las and A-law.
- // Mono
- kDecoderPCMu,
- kDecoderPCMa,
- // Stereo
- kDecoderPCMu_2ch,
- kDecoderPCMa_2ch,
-#ifdef WEBRTC_CODEC_ILBC
- kDecoderILBC,
-#endif
-#ifdef WEBRTC_CODEC_AMR
- kDecoderAMR,
-#endif
-#ifdef WEBRTC_CODEC_AMRWB
- kDecoderAMRWB,
-#endif
-#ifdef WEBRTC_CODEC_CELT
- // Mono
- kDecoderCELT_32,
- // Stereo
- kDecoderCELT_32_2ch,
-#endif
-#ifdef WEBRTC_CODEC_G722
- // Mono
- kDecoderG722,
- // Stereo
- kDecoderG722_2ch,
-#endif
-#ifdef WEBRTC_CODEC_G722_1
- kDecoderG722_1_32,
- kDecoderG722_1_24,
- kDecoderG722_1_16,
-#endif
-#ifdef WEBRTC_CODEC_G722_1C
- kDecoderG722_1C_48,
- kDecoderG722_1C_32,
- kDecoderG722_1C_24,
-#endif
-#ifdef WEBRTC_CODEC_G729
- kDecoderG729,
-#endif
-#ifdef WEBRTC_CODEC_G729_1
- kDecoderG729_1,
-#endif
-#ifdef WEBRTC_CODEC_GSMFR
- kDecoderGSMFR,
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- // Mono and stereo.
- kDecoderOpus,
-#endif
-#ifdef WEBRTC_CODEC_SPEEX
- kDecoderSPEEX_8,
- kDecoderSPEEX_16,
-#endif
- // Comfort noise for three different sampling frequencies.
- kDecoderCNG,
- kDecoderCNG,
- kDecoderCNG,
-#ifdef ENABLE_48000_HZ
- kDecoderCNG,
-#endif
-#ifdef WEBRTC_CODEC_AVT
- kDecoderAVT,
-#endif
-#ifdef WEBRTC_CODEC_RED
- kDecoderRED,
-#endif
- kDecoderReservedEnd
-};
-
-// Get codec information from database.
-// TODO(tlegrand): replace memcpy with a pointer to the data base memory.
-int ACMCodecDB::Codec(int codec_id, CodecInst* codec_inst) {
- // Error check to see that codec_id is not out of bounds.
- if ((codec_id < 0) || (codec_id >= kNumCodecs)) {
- return -1;
- }
-
- // Copy database information for the codec to the output.
- memcpy(codec_inst, &database_[codec_id], sizeof(CodecInst));
-
- return 0;
-}
-
-// Enumerator for error codes when asking for codec database id.
-enum {
- kInvalidCodec = -10,
- kInvalidPayloadtype = -30,
- kInvalidPacketSize = -40,
- kInvalidRate = -50
-};
-
-// Gets the codec id number from the database. If there is some mismatch in
-// the codec settings, the function will return an error code.
-// NOTE! The first mismatch found will generate the return value.
-int ACMCodecDB::CodecNumber(const CodecInst* codec_inst, int* mirror_id) {
- // Look for a matching codec in the database.
- int codec_id = CodecId(codec_inst);
-
- // Checks if we found a matching codec.
- if (codec_id == -1) {
- return kInvalidCodec;
- }
-
- // Checks the validity of payload type
- if (!ValidPayloadType(codec_inst->pltype)) {
- return kInvalidPayloadtype;
- }
-
- // Comfort Noise is special case, packet-size & rate is not checked.
- if (STR_CASE_CMP(database_[codec_id].plname, "CN") == 0) {
- *mirror_id = codec_id;
- return codec_id;
- }
-
- // RED is special case, packet-size & rate is not checked.
- if (STR_CASE_CMP(database_[codec_id].plname, "red") == 0) {
- *mirror_id = codec_id;
- return codec_id;
- }
-
- // Checks the validity of packet size.
- if (codec_settings_[codec_id].num_packet_sizes > 0) {
- bool packet_size_ok = false;
- int i;
- int packet_size_samples;
- for (i = 0; i < codec_settings_[codec_id].num_packet_sizes; i++) {
- packet_size_samples =
- codec_settings_[codec_id].packet_sizes_samples[i];
- if (codec_inst->pacsize == packet_size_samples) {
- packet_size_ok = true;
- break;
- }
- }
-
- if (!packet_size_ok) {
- return kInvalidPacketSize;
- }
- }
-
- if (codec_inst->pacsize < 1) {
- return kInvalidPacketSize;
- }
-
- // Check the validity of rate. Codecs with multiple rates have their own
- // function for this.
- *mirror_id = codec_id;
- if (STR_CASE_CMP("isac", codec_inst->plname) == 0) {
- if (IsISACRateValid(codec_inst->rate)) {
- // Set mirrorID to iSAC WB which is only created once to be used both for
- // iSAC WB and SWB, because they need to share struct.
- *mirror_id = kISAC;
- return codec_id;
- } else {
- return kInvalidRate;
- }
- } else if (STR_CASE_CMP("ilbc", codec_inst->plname) == 0) {
- return IsILBCRateValid(codec_inst->rate, codec_inst->pacsize)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("amr", codec_inst->plname) == 0) {
- return IsAMRRateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("amr-wb", codec_inst->plname) == 0) {
- return IsAMRwbRateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("g7291", codec_inst->plname) == 0) {
- return IsG7291RateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("opus", codec_inst->plname) == 0) {
- return IsOpusRateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("speex", codec_inst->plname) == 0) {
- return IsSpeexRateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- } else if (STR_CASE_CMP("celt", codec_inst->plname) == 0) {
- return IsCeltRateValid(codec_inst->rate)
- ? codec_id : kInvalidRate;
- }
-
- return IsRateValid(codec_id, codec_inst->rate) ?
- codec_id : kInvalidRate;
-}
-
-// Looks for a matching payload name, frequency, and channels in the
-// codec list. Need to check all three since some codecs have several codec
-// entries with different frequencies and/or channels.
-// Does not check other codec settings, such as payload type and packet size.
-// Returns the id of the codec, or -1 if no match is found.
-int ACMCodecDB::CodecId(const CodecInst* codec_inst) {
- return (CodecId(codec_inst->plname, codec_inst->plfreq,
- codec_inst->channels));
-}
-
-int ACMCodecDB::CodecId(const char* payload_name, int frequency, int channels) {
- for (int id = 0; id < kNumCodecs; id++) {
- bool name_match = false;
- bool frequency_match = false;
- bool channels_match = false;
-
- // Payload name, sampling frequency and number of channels need to match.
- // NOTE! If |frequency| is -1, the frequency is not applicable, and is
- // always treated as true, like for RED.
- name_match = (STR_CASE_CMP(database_[id].plname, payload_name) == 0);
- frequency_match = (frequency == database_[id].plfreq) || (frequency == -1);
- // The number of channels must match for all codecs but Opus.
- if (STR_CASE_CMP(payload_name, "opus") != 0) {
- channels_match = (channels == database_[id].channels);
- } else {
- // For opus we just check that number of channels is valid.
- channels_match = (channels == 1 || channels == 2);
- }
-
- if (name_match && frequency_match && channels_match) {
- // We have found a matching codec in the list.
- return id;
- }
- }
-
- // We didn't find a matching codec.
- return -1;
-}
-// Gets codec id number, and mirror id, from database for the receiver.
-int ACMCodecDB::ReceiverCodecNumber(const CodecInst* codec_inst,
- int* mirror_id) {
- // Look for a matching codec in the database.
- int codec_id = CodecId(codec_inst);
-
- // Set |mirror_id| to |codec_id|, except for iSAC. In case of iSAC we always
- // set |mirror_id| to iSAC WB (kISAC) which is only created once to be used
- // both for iSAC WB and SWB, because they need to share struct.
- if (STR_CASE_CMP(codec_inst->plname, "ISAC") != 0) {
- *mirror_id = codec_id;
- } else {
- *mirror_id = kISAC;
- }
-
- return codec_id;
-}
-
-// Returns the codec sampling frequency for codec with id = "codec_id" in
-// database.
-int ACMCodecDB::CodecFreq(int codec_id) {
- // Error check to see that codec_id is not out of bounds.
- if (codec_id < 0 || codec_id >= kNumCodecs) {
- return -1;
- }
-
- return database_[codec_id].plfreq;
-}
-
-// Returns the codec's basic coding block size in samples.
-int ACMCodecDB::BasicCodingBlock(int codec_id) {
- // Error check to see that codec_id is not out of bounds.
- if (codec_id < 0 || codec_id >= kNumCodecs) {
- return -1;
- }
-
- return codec_settings_[codec_id].basic_block_samples;
-}
-
-// Returns the NetEQ decoder database.
-const WebRtcNetEQDecoder* ACMCodecDB::NetEQDecoders() {
- return neteq_decoders_;
-}
-
-// Gets mirror id. The Id is used for codecs sharing struct for settings that
-// need different payload types.
-int ACMCodecDB::MirrorID(int codec_id) {
- if (STR_CASE_CMP(database_[codec_id].plname, "isac") == 0) {
- return kISAC;
- } else {
- return codec_id;
- }
-}
-
-// Creates memory/instance for storing codec state.
-ACMGenericCodec* ACMCodecDB::CreateCodecInstance(const CodecInst* codec_inst) {
- // All we have support for right now.
- if (!STR_CASE_CMP(codec_inst->plname, "ISAC")) {
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- return new ACMISAC(kISAC);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "PCMU")) {
- if (codec_inst->channels == 1) {
- return new ACMPCMU(kPCMU);
- } else {
- return new ACMPCMU(kPCMU_2ch);
- }
- } else if (!STR_CASE_CMP(codec_inst->plname, "PCMA")) {
- if (codec_inst->channels == 1) {
- return new ACMPCMA(kPCMA);
- } else {
- return new ACMPCMA(kPCMA_2ch);
- }
- } else if (!STR_CASE_CMP(codec_inst->plname, "ILBC")) {
-#ifdef WEBRTC_CODEC_ILBC
- return new ACMILBC(kILBC);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "AMR")) {
-#ifdef WEBRTC_CODEC_AMR
- return new ACMAMR(kGSMAMR);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "AMR-WB")) {
-#ifdef WEBRTC_CODEC_AMRWB
- return new ACMAMRwb(kGSMAMRWB);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "CELT")) {
-#ifdef WEBRTC_CODEC_CELT
- if (codec_inst->channels == 1) {
- return new ACMCELT(kCELT32);
- } else {
- return new ACMCELT(kCELT32_2ch);
- }
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "G722")) {
-#ifdef WEBRTC_CODEC_G722
- if (codec_inst->channels == 1) {
- return new ACMG722(kG722);
- } else {
- return new ACMG722(kG722_2ch);
- }
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "G7221")) {
- switch (codec_inst->plfreq) {
- case 16000: {
-#ifdef WEBRTC_CODEC_G722_1
- int codec_id;
- switch (codec_inst->rate) {
- case 16000 : {
- codec_id = kG722_1_16;
- break;
- }
- case 24000 : {
- codec_id = kG722_1_24;
- break;
- }
- case 32000 : {
- codec_id = kG722_1_32;
- break;
- }
- default: {
- return NULL;
- }
- }
- return new ACMG722_1(codec_id);
-#endif
- }
- case 32000: {
-#ifdef WEBRTC_CODEC_G722_1C
- int codec_id;
- switch (codec_inst->rate) {
- case 24000 : {
- codec_id = kG722_1C_24;
- break;
- }
- case 32000 : {
- codec_id = kG722_1C_32;
- break;
- }
- case 48000 : {
- codec_id = kG722_1C_48;
- break;
- }
- default: {
- return NULL;
- }
- }
- return new ACMG722_1C(codec_id);
-#endif
- }
- }
- } else if (!STR_CASE_CMP(codec_inst->plname, "CN")) {
- // For CN we need to check sampling frequency to know what codec to create.
- int codec_id;
- switch (codec_inst->plfreq) {
- case 8000: {
- codec_id = kCNNB;
- break;
- }
- case 16000: {
- codec_id = kCNWB;
- break;
- }
- case 32000: {
- codec_id = kCNSWB;
- break;
- }
-#ifdef ENABLE_48000_HZ
- case 48000: {
- codec_id = kCNFB;
- break;
- }
-#endif
- default: {
- return NULL;
- }
- }
- return new ACMCNG(codec_id);
- } else if (!STR_CASE_CMP(codec_inst->plname, "G729")) {
-#ifdef WEBRTC_CODEC_G729
- return new ACMG729(kG729);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "G7291")) {
-#ifdef WEBRTC_CODEC_G729_1
- return new ACMG729_1(kG729_1);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "opus")) {
-#ifdef WEBRTC_CODEC_OPUS
- return new ACMOpus(kOpus);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "speex")) {
-#ifdef WEBRTC_CODEC_SPEEX
- int codec_id;
- switch (codec_inst->plfreq) {
- case 8000: {
- codec_id = kSPEEX8;
- break;
- }
- case 16000: {
- codec_id = kSPEEX16;
- break;
- }
- default: {
- return NULL;
- }
- }
- return new ACMSPEEX(codec_id);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "CN")) {
- // For CN we need to check sampling frequency to know what codec to create.
- int codec_id;
- switch (codec_inst->plfreq) {
- case 8000: {
- codec_id = kCNNB;
- break;
- }
- case 16000: {
- codec_id = kCNWB;
- break;
- }
- case 32000: {
- codec_id = kCNSWB;
- break;
- }
-#ifdef ENABLE_48000_HZ
- case 48000: {
- codec_id = kCNFB;
- break;
- }
-#endif
- default: {
- return NULL;
- }
- }
- return new ACMCNG(codec_id);
- } else if (!STR_CASE_CMP(codec_inst->plname, "L16")) {
-#ifdef WEBRTC_CODEC_PCM16
- // For L16 we need to check sampling frequency to know what codec to create.
- int codec_id;
- if (codec_inst->channels == 1) {
- switch (codec_inst->plfreq) {
- case 8000: {
- codec_id = kPCM16B;
- break;
- }
- case 16000: {
- codec_id = kPCM16Bwb;
- break;
- }
- case 32000: {
- codec_id = kPCM16Bswb32kHz;
- break;
- }
- default: {
- return NULL;
- }
- }
- } else {
- switch (codec_inst->plfreq) {
- case 8000: {
- codec_id = kPCM16B_2ch;
- break;
- }
- case 16000: {
- codec_id = kPCM16Bwb_2ch;
- break;
- }
- case 32000: {
- codec_id = kPCM16Bswb32kHz_2ch;
- break;
- }
- default: {
- return NULL;
- }
- }
- }
- return new ACMPCM16B(codec_id);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "telephone-event")) {
-#ifdef WEBRTC_CODEC_AVT
- return new ACMDTMFPlayout(kAVT);
-#endif
- } else if (!STR_CASE_CMP(codec_inst->plname, "red")) {
-#ifdef WEBRTC_CODEC_RED
- return new ACMRED(kRED);
-#endif
- }
- return NULL;
-}
-
-// Checks if the bitrate is valid for the codec.
-bool ACMCodecDB::IsRateValid(int codec_id, int rate) {
- if (database_[codec_id].rate == rate) {
- return true;
- } else {
- return false;
- }
-}
-
-// Checks if the bitrate is valid for iSAC.
-bool ACMCodecDB::IsISACRateValid(int rate) {
- if ((rate == -1) || ((rate <= 56000) && (rate >= 10000))) {
- return true;
- } else {
- return false;
- }
-}
-
-// Checks if the bitrate is valid for iLBC.
-bool ACMCodecDB::IsILBCRateValid(int rate, int frame_size_samples) {
- if (((frame_size_samples == 240) || (frame_size_samples == 480)) &&
- (rate == 13300)) {
- return true;
- } else if (((frame_size_samples == 160) || (frame_size_samples == 320)) &&
- (rate == 15200)) {
- return true;
- } else {
- return false;
- }
-}
-
-// Check if the bitrate is valid for the GSM-AMR.
-bool ACMCodecDB::IsAMRRateValid(int rate) {
- switch (rate) {
- case 4750:
- case 5150:
- case 5900:
- case 6700:
- case 7400:
- case 7950:
- case 10200:
- case 12200: {
- return true;
- }
- default: {
- return false;
- }
- }
-}
-
-// Check if the bitrate is valid for GSM-AMR-WB.
-bool ACMCodecDB::IsAMRwbRateValid(int rate) {
- switch (rate) {
- case 7000:
- case 9000:
- case 12000:
- case 14000:
- case 16000:
- case 18000:
- case 20000:
- case 23000:
- case 24000: {
- return true;
- }
- default: {
- return false;
- }
- }
-}
-
-// Check if the bitrate is valid for G.729.1.
-bool ACMCodecDB::IsG7291RateValid(int rate) {
- switch (rate) {
- case 8000:
- case 12000:
- case 14000:
- case 16000:
- case 18000:
- case 20000:
- case 22000:
- case 24000:
- case 26000:
- case 28000:
- case 30000:
- case 32000: {
- return true;
- }
- default: {
- return false;
- }
- }
-}
-
-// Checks if the bitrate is valid for Speex.
-bool ACMCodecDB::IsSpeexRateValid(int rate) {
- if (rate > 2000) {
- return true;
- } else {
- return false;
- }
-}
-
-// Checks if the bitrate is valid for Opus.
-bool ACMCodecDB::IsOpusRateValid(int rate) {
- if ((rate < 6000) || (rate > 510000)) {
- return false;
- }
- return true;
-}
-
-// Checks if the bitrate is valid for Celt.
-bool ACMCodecDB::IsCeltRateValid(int rate) {
- if ((rate >= 48000) && (rate <= 128000)) {
- return true;
- } else {
- return false;
- }
-}
-
-// Checks if the payload type is in the valid range.
-bool ACMCodecDB::ValidPayloadType(int payload_type) {
- if ((payload_type < 0) || (payload_type > 127)) {
- return false;
- }
- return true;
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.h
deleted file mode 100644
index 7a7054dd1aa..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_codec_database.h
+++ /dev/null
@@ -1,336 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file generates databases with information about all supported audio
- * codecs.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
-
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-// TODO(tlegrand): replace class ACMCodecDB with a namespace.
-class ACMCodecDB {
- public:
- // Enum with array indexes for the supported codecs. NOTE! The order MUST
- // be the same as when creating the database in acm_codec_database.cc.
- enum {
- kNone = -1
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
- , kISAC
-# if (defined(WEBRTC_CODEC_ISAC))
- , kISACSWB
- , kISACFB
-# endif
-#endif
-#ifdef WEBRTC_CODEC_PCM16
- // Mono
- , kPCM16B
- , kPCM16Bwb
- , kPCM16Bswb32kHz
- // Stereo
- , kPCM16B_2ch
- , kPCM16Bwb_2ch
- , kPCM16Bswb32kHz_2ch
-#endif
- // Mono
- , kPCMU
- , kPCMA
- // Stereo
- , kPCMU_2ch
- , kPCMA_2ch
-#ifdef WEBRTC_CODEC_ILBC
- , kILBC
-#endif
-#ifdef WEBRTC_CODEC_AMR
- , kGSMAMR
-#endif
-#ifdef WEBRTC_CODEC_AMRWB
- , kGSMAMRWB
-#endif
-#ifdef WEBRTC_CODEC_CELT
- // Mono
- , kCELT32
- // Stereo
- , kCELT32_2ch
-#endif
-#ifdef WEBRTC_CODEC_G722
- // Mono
- , kG722
- // Stereo
- , kG722_2ch
-#endif
-#ifdef WEBRTC_CODEC_G722_1
- , kG722_1_32
- , kG722_1_24
- , kG722_1_16
-#endif
-#ifdef WEBRTC_CODEC_G722_1C
- , kG722_1C_48
- , kG722_1C_32
- , kG722_1C_24
-#endif
-#ifdef WEBRTC_CODEC_G729
- , kG729
-#endif
-#ifdef WEBRTC_CODEC_G729_1
- , kG729_1
-#endif
-#ifdef WEBRTC_CODEC_GSMFR
- , kGSMFR
-#endif
-#ifdef WEBRTC_CODEC_OPUS
- // Mono and stereo
- , kOpus
-#endif
-#ifdef WEBRTC_CODEC_SPEEX
- , kSPEEX8
- , kSPEEX16
-#endif
- , kCNNB
- , kCNWB
- , kCNSWB
-#ifdef ENABLE_48000_HZ
- , kCNFB
-#endif
-#ifdef WEBRTC_CODEC_AVT
- , kAVT
-#endif
-#ifdef WEBRTC_CODEC_RED
- , kRED
-#endif
- , kNumCodecs
- };
-
- // Set unsupported codecs to -1
-#ifndef WEBRTC_CODEC_ISAC
- enum {kISACSWB = -1};
- enum {kISACFB = -1};
-# ifndef WEBRTC_CODEC_ISACFX
- enum {kISAC = -1};
-# endif
-#endif
-#ifndef WEBRTC_CODEC_PCM16
- // Mono
- enum {kPCM16B = -1};
- enum {kPCM16Bwb = -1};
- enum {kPCM16Bswb32kHz = -1};
- // Stereo
- enum {kPCM16B_2ch = -1};
- enum {kPCM16Bwb_2ch = -1};
- enum {kPCM16Bswb32kHz_2ch = -1};
-#endif
- // 48 kHz not supported, always set to -1.
- enum {kPCM16Bswb48kHz = -1};
-#ifndef WEBRTC_CODEC_ILBC
- enum {kILBC = -1};
-#endif
-#ifndef WEBRTC_CODEC_AMR
- enum {kGSMAMR = -1};
-#endif
-#ifndef WEBRTC_CODEC_AMRWB
- enum {kGSMAMRWB = -1};
-#endif
-#ifndef WEBRTC_CODEC_CELT
- // Mono
- enum {kCELT32 = -1};
- // Stereo
- enum {kCELT32_2ch = -1};
-#endif
-#ifndef WEBRTC_CODEC_G722
- // Mono
- enum {kG722 = -1};
- // Stereo
- enum {kG722_2ch = -1};
-#endif
-#ifndef WEBRTC_CODEC_G722_1
- enum {kG722_1_32 = -1};
- enum {kG722_1_24 = -1};
- enum {kG722_1_16 = -1};
-#endif
-#ifndef WEBRTC_CODEC_G722_1C
- enum {kG722_1C_48 = -1};
- enum {kG722_1C_32 = -1};
- enum {kG722_1C_24 = -1};
-#endif
-#ifndef WEBRTC_CODEC_G729
- enum {kG729 = -1};
-#endif
-#ifndef WEBRTC_CODEC_G729_1
- enum {kG729_1 = -1};
-#endif
-#ifndef WEBRTC_CODEC_GSMFR
- enum {kGSMFR = -1};
-#endif
-#ifndef WEBRTC_CODEC_SPEEX
- enum {kSPEEX8 = -1};
- enum {kSPEEX16 = -1};
-#endif
-#ifndef WEBRTC_CODEC_OPUS
- // Mono and stereo
- enum {kOpus = -1};
-#endif
-#ifndef WEBRTC_CODEC_AVT
- enum {kAVT = -1};
-#endif
-#ifndef WEBRTC_CODEC_RED
- enum {kRED = -1};
-#endif
-
- // kMaxNumCodecs - Maximum number of codecs that can be activated in one
- // build.
- // kMaxNumPacketSize - Maximum number of allowed packet sizes for one codec.
- // These might need to be increased if adding a new codec to the database
- static const int kMaxNumCodecs = 50;
- static const int kMaxNumPacketSize = 6;
-
- // Codec specific settings
- //
- // num_packet_sizes - number of allowed packet sizes.
- // packet_sizes_samples - list of the allowed packet sizes.
- // basic_block_samples - assigned a value different from 0 if the codec
- // requires to be fed with a specific number of samples
- // that can be different from packet size.
- // channel_support - number of channels supported to encode;
- // 1 = mono, 2 = stereo, etc.
- struct CodecSettings {
- int num_packet_sizes;
- int packet_sizes_samples[kMaxNumPacketSize];
- int basic_block_samples;
- int channel_support;
- };
-
- // Gets codec information from database at the position in database given by
- // [codec_id].
- // Input:
- // [codec_id] - number that specifies at what position in the database to
- // get the information.
- // Output:
- // [codec_inst] - filled with information about the codec.
- // Return:
- // 0 if successful, otherwise -1.
- static int Codec(int codec_id, CodecInst* codec_inst);
-
- // Returns codec id and mirror id from database, given the information
- // received in the input [codec_inst]. Mirror id is a number that tells
- // where to find the codec's memory (instance). The number is either the
- // same as codec id (most common), or a number pointing at a different
- // entry in the database, if the codec has several entries with different
- // payload types. This is used for codecs that must share one struct even if
- // the payload type differs.
- // One example is the codec iSAC which has the same struct for both 16 and
- // 32 khz, but they have different entries in the database. Let's say the
- // function is called with iSAC 32kHz. The function will return 1 as that is
- // the entry in the data base, and [mirror_id] = 0, as that is the entry for
- // iSAC 16 kHz, which holds the shared memory.
- // Input:
- // [codec_inst] - Information about the codec for which we require the
- // database id.
- // Output:
- // [mirror_id] - mirror id, which most often is the same as the return
- // value, see above.
- // Return:
- // codec id if successful, otherwise < 0.
- static int CodecNumber(const CodecInst* codec_inst, int* mirror_id);
- static int CodecId(const CodecInst* codec_inst);
- static int CodecId(const char* payload_name, int frequency, int channels);
- static int ReceiverCodecNumber(const CodecInst* codec_inst, int* mirror_id);
-
- // Returns the codec sampling frequency for codec with id = "codec_id" in
- // database.
- // TODO(tlegrand): Check if function is needed, or if we can change
- // to access database directly.
- // Input:
- // [codec_id] - number that specifies at what position in the database to
- // get the information.
- // Return:
- // codec sampling frequency if successful, otherwise -1.
- static int CodecFreq(int codec_id);
-
- // Return the codec's basic coding block size in samples.
- // TODO(tlegrand): Check if function is needed, or if we can change
- // to access database directly.
- // Input:
- // [codec_id] - number that specifies at what position in the database to
- // get the information.
- // Return:
- // codec basic block size if successful, otherwise -1.
- static int BasicCodingBlock(int codec_id);
-
- // Returns the NetEQ decoder database.
- static const WebRtcNetEQDecoder* NetEQDecoders();
-
- // Returns mirror id, which is a number that tells where to find the codec's
- // memory (instance). It is either the same as codec id (most common), or a
- // number pointing at a different entry in the database, if the codec have
- // several entries with different payload types. This is used for codecs that
- // must share struct even if the payload type differs.
- // TODO(tlegrand): Check if function is needed, or if we can change
- // to access database directly.
- // Input:
- // [codec_id] - number that specifies codec's position in the database.
- // Return:
- // Mirror id on success, otherwise -1.
- static int MirrorID(int codec_id);
-
- // Create memory/instance for storing codec state.
- // Input:
- // [codec_inst] - information about codec. Only name of codec, "plname", is
- // used in this function.
- static ACMGenericCodec* CreateCodecInstance(const CodecInst* codec_inst);
-
- // Checks if the bitrate is valid for the codec.
- // Input:
- // [codec_id] - number that specifies codec's position in the database.
- // [rate] - bitrate to check.
- // [frame_size_samples] - (used for iLBC) specifies which frame size to go
- // with the rate.
- static bool IsRateValid(int codec_id, int rate);
- static bool IsISACRateValid(int rate);
- static bool IsILBCRateValid(int rate, int frame_size_samples);
- static bool IsAMRRateValid(int rate);
- static bool IsAMRwbRateValid(int rate);
- static bool IsG7291RateValid(int rate);
- static bool IsSpeexRateValid(int rate);
- static bool IsOpusRateValid(int rate);
- static bool IsCeltRateValid(int rate);
-
- // Check if the payload type is valid, meaning that it is in the valid range
- // of 0 to 127.
- // Input:
- // [payload_type] - payload type.
- static bool ValidPayloadType(int payload_type);
-
- // Databases with information about the supported codecs
- // database_ - stored information about all codecs: payload type, name,
- // sampling frequency, packet size in samples, default channel
- // support, and default rate.
- // codec_settings_ - stored codec settings: number of allowed packet sizes,
- // a vector with the allowed packet sizes, basic block
- // samples, and max number of channels that are supported.
- // neteq_decoders_ - list of supported decoders in NetEQ.
- static const CodecInst database_[kMaxNumCodecs];
- static const CodecSettings codec_settings_[kMaxNumCodecs];
- static const WebRtcNetEQDecoder neteq_decoders_[kMaxNumCodecs];
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc
deleted file mode 100644
index edb62987689..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h"
-
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMDTMFDetection::ACMDTMFDetection() {}
-
-ACMDTMFDetection::~ACMDTMFDetection() {}
-
-int16_t ACMDTMFDetection::Enable(ACMCountries /* cpt */) {
- return -1;
-}
-
-int16_t ACMDTMFDetection::Disable() {
- return -1;
-}
-
-int16_t ACMDTMFDetection::Detect(
- const int16_t* /* in_audio_buff */,
- const uint16_t /* in_buff_len_word16 */,
- const int32_t /* in_freq_hz */,
- bool& /* tone_detected */,
- int16_t& /* tone */) {
- return -1;
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h
deleted file mode 100644
index 74553107a36..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
-
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_resampler.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMDTMFDetection {
- public:
- ACMDTMFDetection();
- ~ACMDTMFDetection();
- int16_t Enable(ACMCountries cpt = ACMDisableCountryDetection);
- int16_t Disable();
- int16_t Detect(const int16_t* in_audio_buff,
- const uint16_t in_buff_len_word16,
- const int32_t in_freq_hz,
- bool& tone_detected,
- int16_t& tone);
-
- private:
- ACMResampler resampler_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc
deleted file mode 100644
index 32195e6fe82..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_AVT
-
-ACMDTMFPlayout::ACMDTMFPlayout(
- int16_t /* codec_id */) {
- return;
-}
-
-ACMDTMFPlayout::~ACMDTMFPlayout() {
- return;
-}
-
-int16_t ACMDTMFPlayout::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMDTMFPlayout::DecodeSafe(
- uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMDTMFPlayout::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMDTMFPlayout::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMDTMFPlayout::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMDTMFPlayout::InternalCreateEncoder() {
- return -1;
-}
-
-int16_t ACMDTMFPlayout::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMDTMFPlayout::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-void ACMDTMFPlayout::DestructEncoderSafe() {
- return;
-}
-
-void ACMDTMFPlayout::DestructDecoderSafe() {
- return;
-}
-
-#else //===================== Actual Implementation =======================
-
-ACMDTMFPlayout::ACMDTMFPlayout(int16_t codec_id) {
- codec_id_ = codec_id;
-}
-
-ACMDTMFPlayout::~ACMDTMFPlayout() {
- return;
-}
-
-int16_t ACMDTMFPlayout::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return 0;
-}
-
-int16_t ACMDTMFPlayout::DecodeSafe(
- uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMDTMFPlayout::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization,
- // DTMFPlayout has no instance
- return 0;
-}
-
-int16_t ACMDTMFPlayout::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization,
- // DTMFPlayout has no instance
- return 0;
-}
-
-int32_t ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_AVT_FUNCTION."
- // Then call NetEQ to add the codec to it's
- // database.
- SET_CODEC_PAR((codec_def), kDecoderAVT, codec_inst.pltype, NULL, 8000);
- SET_AVT_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMDTMFPlayout::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMDTMFPlayout::InternalCreateEncoder() {
- // DTMFPlayout has no instance
- return 0;
-}
-
-int16_t ACMDTMFPlayout::InternalCreateDecoder() {
- // DTMFPlayout has no instance
- return 0;
-}
-
-void ACMDTMFPlayout::InternalDestructEncoderInst(void* /* ptr_inst */) {
- // DTMFPlayout has no instance
- return;
-}
-
-void ACMDTMFPlayout::DestructEncoderSafe() {
- // DTMFPlayout has no instance
- return;
-}
-
-void ACMDTMFPlayout::DestructDecoderSafe() {
- // DTMFPlayout has no instance
- return;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h
deleted file mode 100644
index 46175f59e66..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMDTMFPlayout: public ACMGenericCodec {
- public:
- explicit ACMDTMFPlayout(int16_t codec_id);
- virtual ~ACMDTMFPlayout();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.cc
deleted file mode 100644
index 1c19109b6a3..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.cc
+++ /dev/null
@@ -1,358 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_g722.h"
-
-#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_G722
-
-ACMG722::ACMG722(int16_t /* codec_id */)
- : ptr_enc_str_(NULL),
- ptr_dec_str_(NULL),
- encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL) {}
-
-ACMG722::~ACMG722() {}
-
-int32_t ACMG722::Add10MsDataSafe(
- const uint32_t /* timestamp */,
- const int16_t* /* data */,
- const uint16_t /* length_smpl */,
- const uint8_t /* audio_channel */) {
- return -1;
-}
-
-int16_t ACMG722::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMG722::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMG722::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMG722::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMG722::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMG722::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMG722::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMG722::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMG722::DestructDecoderSafe() {
- return;
-}
-
-void ACMG722::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-void ACMG722::SplitStereoPacket(uint8_t* /*payload*/,
- int32_t* /*payload_length*/) {}
-
-#else //===================== Actual Implementation =======================
-
-// Encoder and decoder memory
-struct ACMG722EncStr {
- G722EncInst* inst; // instance for left channel in case of stereo
- G722EncInst* inst_right; // instance for right channel in case of stereo
-};
-struct ACMG722DecStr {
- G722DecInst* inst; // instance for left channel in case of stereo
- G722DecInst* inst_right; // instance for right channel in case of stereo
-};
-
-ACMG722::ACMG722(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL) {
- // Encoder
- ptr_enc_str_ = new ACMG722EncStr;
- if (ptr_enc_str_ != NULL) {
- ptr_enc_str_->inst = NULL;
- ptr_enc_str_->inst_right = NULL;
- }
- // Decoder
- ptr_dec_str_ = new ACMG722DecStr;
- if (ptr_dec_str_ != NULL) {
- ptr_dec_str_->inst = NULL;
- ptr_dec_str_->inst_right = NULL; // Not used
- }
- codec_id_ = codec_id;
- return;
-}
-
-ACMG722::~ACMG722() {
- // Encoder
- if (ptr_enc_str_ != NULL) {
- if (ptr_enc_str_->inst != NULL) {
- WebRtcG722_FreeEncoder(ptr_enc_str_->inst);
- ptr_enc_str_->inst = NULL;
- }
- if (ptr_enc_str_->inst_right != NULL) {
- WebRtcG722_FreeEncoder(ptr_enc_str_->inst_right);
- ptr_enc_str_->inst_right = NULL;
- }
- delete ptr_enc_str_;
- ptr_enc_str_ = NULL;
- }
- // Decoder
- if (ptr_dec_str_ != NULL) {
- if (ptr_dec_str_->inst != NULL) {
- WebRtcG722_FreeDecoder(ptr_dec_str_->inst);
- ptr_dec_str_->inst = NULL;
- }
- if (ptr_dec_str_->inst_right != NULL) {
- WebRtcG722_FreeDecoder(ptr_dec_str_->inst_right);
- ptr_dec_str_->inst_right = NULL;
- }
- delete ptr_dec_str_;
- ptr_dec_str_ = NULL;
- }
- return;
-}
-
-int32_t ACMG722::Add10MsDataSafe(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length_smpl,
- const uint8_t audio_channel) {
- return ACMGenericCodec::Add10MsDataSafe((timestamp >> 1), data, length_smpl,
- audio_channel);
-}
-
-int16_t ACMG722::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- // If stereo, split input signal in left and right channel before encoding
- if (num_channels_ == 2) {
- int16_t left_channel[960];
- int16_t right_channel[960];
- uint8_t out_left[480];
- uint8_t out_right[480];
- int16_t len_in_bytes;
- for (int i = 0, j = 0; i < frame_len_smpl_ * 2; i += 2, j++) {
- left_channel[j] = in_audio_[in_audio_ix_read_ + i];
- right_channel[j] = in_audio_[in_audio_ix_read_ + i + 1];
- }
- len_in_bytes = WebRtcG722_Encode(encoder_inst_ptr_, left_channel,
- frame_len_smpl_,
- (int16_t*)out_left);
- len_in_bytes += WebRtcG722_Encode(encoder_inst_ptr_right_, right_channel,
- frame_len_smpl_,
- (int16_t*)out_right);
- *bitstream_len_byte = len_in_bytes;
-
- // Interleave the 4 bits per sample from left and right channel
- for (int i = 0, j = 0; i < len_in_bytes; i += 2, j++) {
- bitstream[i] = (out_left[j] & 0xF0) + (out_right[j] >> 4);
- bitstream[i + 1] = ((out_left[j] & 0x0F) << 4) + (out_right[j] & 0x0F);
- }
- } else {
- *bitstream_len_byte = WebRtcG722_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- (int16_t*)bitstream);
- }
-
- // increment the read index this tell the caller how far
- // we have gone forward in reading the audio buffer
- in_audio_ix_read_ += frame_len_smpl_ * num_channels_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMG722::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMG722::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- if (codec_params->codec_inst.channels == 2) {
- // Create codec struct for right channel
- if (ptr_enc_str_->inst_right == NULL) {
- WebRtcG722_CreateEncoder(&ptr_enc_str_->inst_right);
- if (ptr_enc_str_->inst_right == NULL) {
- return -1;
- }
- }
- encoder_inst_ptr_right_ = ptr_enc_str_->inst_right;
- if (WebRtcG722_EncoderInit(encoder_inst_ptr_right_) < 0) {
- return -1;
- }
- }
-
- return WebRtcG722_EncoderInit(encoder_inst_ptr_);
-}
-
-int16_t ACMG722::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return WebRtcG722_DecoderInit(decoder_inst_ptr_);
-}
-
-int32_t ACMG722::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- // TODO(turajs): log error
- return -1;
- }
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_G722_FUNCTION."
- // Then call NetEQ to add the codec to it's
- // database.
- if (codec_inst.channels == 1) {
- SET_CODEC_PAR(codec_def, kDecoderG722, codec_inst.pltype, decoder_inst_ptr_,
- 16000);
- } else {
- SET_CODEC_PAR(codec_def, kDecoderG722_2ch, codec_inst.pltype,
- decoder_inst_ptr_, 16000);
- }
- SET_G722_FUNCTIONS(codec_def);
- return 0;
-}
-
-ACMGenericCodec* ACMG722::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722::InternalCreateEncoder() {
- if (ptr_enc_str_ == NULL) {
- // this structure must be created at the costructor
- // if it is still NULL then there is a probelm and
- // we dont continue
- return -1;
- }
- WebRtcG722_CreateEncoder(&ptr_enc_str_->inst);
- if (ptr_enc_str_->inst == NULL) {
- return -1;
- }
- encoder_inst_ptr_ = ptr_enc_str_->inst;
- return 0;
-}
-
-void ACMG722::DestructEncoderSafe() {
- if (ptr_enc_str_ != NULL) {
- if (ptr_enc_str_->inst != NULL) {
- WebRtcG722_FreeEncoder(ptr_enc_str_->inst);
- ptr_enc_str_->inst = NULL;
- }
- }
- encoder_exist_ = false;
- encoder_initialized_ = false;
-}
-
-int16_t ACMG722::InternalCreateDecoder() {
- if (ptr_dec_str_ == NULL) {
- // this structure must be created at the costructor
- // if it is still NULL then there is a probelm and
- // we dont continue
- return -1;
- }
-
- WebRtcG722_CreateDecoder(&ptr_dec_str_->inst);
- if (ptr_dec_str_->inst == NULL) {
- return -1;
- }
- decoder_inst_ptr_ = ptr_dec_str_->inst;
- return 0;
-}
-
-void ACMG722::DestructDecoderSafe() {
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (ptr_dec_str_ != NULL) {
- if (ptr_dec_str_->inst != NULL) {
- WebRtcG722_FreeDecoder(ptr_dec_str_->inst);
- ptr_dec_str_->inst = NULL;
- }
- }
-}
-
-void ACMG722::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcG722_FreeEncoder(static_cast<G722EncInst*>(ptr_inst));
- }
- return;
-}
-
-// Split the stereo packet and place left and right channel after each other
-// in the payload vector.
-void ACMG722::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- uint8_t right_byte;
-
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
- // Regroup the 4 bits/sample so to |l1 l2| |r1 r2| |l3 l4| |r3 r4| ...,
- // where "lx" is 4 bits representing left sample number x, and "rx" right
- // sample. Two samples fits in one byte, represented with |...|.
- for (int i = 0; i < *payload_length; i += 2) {
- right_byte = ((payload[i] & 0x0F) << 4) + (payload[i + 1] & 0x0F);
- payload[i] = (payload[i] & 0xF0) + (payload[i + 1] >> 4);
- payload[i + 1] = right_byte;
- }
-
- // Move one byte representing right channel each loop, and place it at the
- // end of the bytestream vector. After looping the data is reordered to:
- // |l1 l2| |l3 l4| ... |l(N-1) lN| |r1 r2| |r3 r4| ... |r(N-1) r(N)|,
- // where N is the total number of samples.
- for (int i = 0; i < *payload_length / 2; i++) {
- right_byte = payload[i + 1];
- memmove(&payload[i + 1], &payload[i + 2], *payload_length - i - 2);
- payload[*payload_length - 1] = right_byte;
- }
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.h
deleted file mode 100644
index cf7ebe1e223..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g722.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-typedef struct WebRtcG722EncInst G722EncInst;
-typedef struct WebRtcG722DecInst G722DecInst;
-
-namespace webrtc {
-
-namespace acm1 {
-
-// forward declaration
-struct ACMG722EncStr;
-struct ACMG722DecStr;
-
-class ACMG722 : public ACMGenericCodec {
- public:
- explicit ACMG722(int16_t codec_id);
- virtual ~ACMG722();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual int32_t Add10MsDataSafe(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length_smpl,
- const uint8_t audio_channel) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-
- ACMG722EncStr* ptr_enc_str_;
- ACMG722DecStr* ptr_dec_str_;
-
- G722EncInst* encoder_inst_ptr_;
- G722EncInst* encoder_inst_ptr_right_; // Prepared for stereo
- G722DecInst* decoder_inst_ptr_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.cc
deleted file mode 100644
index ed172fd3e1b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.cc
+++ /dev/null
@@ -1,500 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_g7221.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_G722_1
-// NOTE! G.722.1 is not included in the open-source package. The following
-// interface file is needed:
-//
-// /modules/audio_coding/codecs/g7221/main/interface/g7221_interface.h
-//
-// The API in the header file should match the one below.
-//
-// int16_t WebRtcG7221_CreateEnc16(G722_1_16_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_CreateEnc24(G722_1_24_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_CreateEnc32(G722_1_32_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_CreateDec16(G722_1_16_decinst_t_** dec_inst);
-// int16_t WebRtcG7221_CreateDec24(G722_1_24_decinst_t_** dec_inst);
-// int16_t WebRtcG7221_CreateDec32(G722_1_32_decinst_t_** dec_inst);
-//
-// int16_t WebRtcG7221_FreeEnc16(G722_1_16_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_FreeEnc24(G722_1_24_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_FreeEnc32(G722_1_32_encinst_t_** enc_inst);
-// int16_t WebRtcG7221_FreeDec16(G722_1_16_decinst_t_** dec_inst);
-// int16_t WebRtcG7221_FreeDec24(G722_1_24_decinst_t_** dec_inst);
-// int16_t WebRtcG7221_FreeDec32(G722_1_32_decinst_t_** dec_inst);
-//
-// int16_t WebRtcG7221_EncoderInit16(G722_1_16_encinst_t_* enc_inst);
-// int16_t WebRtcG7221_EncoderInit24(G722_1_24_encinst_t_* enc_inst);
-// int16_t WebRtcG7221_EncoderInit32(G722_1_32_encinst_t_* enc_inst);
-// int16_t WebRtcG7221_DecoderInit16(G722_1_16_decinst_t_* dec_inst);
-// int16_t WebRtcG7221_DecoderInit24(G722_1_24_decinst_t_* dec_inst);
-// int16_t WebRtcG7221_DecoderInit32(G722_1_32_decinst_t_* dec_inst);
-//
-// int16_t WebRtcG7221_Encode16(G722_1_16_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221_Encode24(G722_1_24_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221_Encode32(G722_1_32_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-//
-// int16_t WebRtcG7221_Decode16(G722_1_16_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221_Decode24(G722_1_24_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221_Decode32(G722_1_32_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-//
-// int16_t WebRtcG7221_DecodePlc16(G722_1_16_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-// int16_t WebRtcG7221_DecodePlc24(G722_1_24_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-// int16_t WebRtcG7221_DecodePlc32(G722_1_32_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-#include "g7221_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_G722_1
-
-ACMG722_1::ACMG722_1(int16_t /* codec_id */)
- : operational_rate_(-1),
- encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL),
- encoder_inst16_ptr_(NULL),
- encoder_inst16_ptr_right_(NULL),
- encoder_inst24_ptr_(NULL),
- encoder_inst24_ptr_right_(NULL),
- encoder_inst32_ptr_(NULL),
- encoder_inst32_ptr_right_(NULL),
- decoder_inst16_ptr_(NULL),
- decoder_inst24_ptr_(NULL),
- decoder_inst32_ptr_(NULL) {
- return;
-}
-
-ACMG722_1::~ACMG722_1() {
- return;
-}
-
-int16_t ACMG722_1::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMG722_1::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMG722_1::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMG722_1::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMG722_1::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722_1::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMG722_1::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMG722_1::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMG722_1::DestructDecoderSafe() {
- return;
-}
-
-void ACMG722_1::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-#else //===================== Actual Implementation =======================
-ACMG722_1::ACMG722_1(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL),
- encoder_inst16_ptr_(NULL),
- encoder_inst16_ptr_right_(NULL),
- encoder_inst24_ptr_(NULL),
- encoder_inst24_ptr_right_(NULL),
- encoder_inst32_ptr_(NULL),
- encoder_inst32_ptr_right_(NULL),
- decoder_inst16_ptr_(NULL),
- decoder_inst24_ptr_(NULL),
- decoder_inst32_ptr_(NULL) {
- codec_id_ = codec_id;
- if (codec_id_ == ACMCodecDB::kG722_1_16) {
- operational_rate_ = 16000;
- } else if (codec_id_ == ACMCodecDB::kG722_1_24) {
- operational_rate_ = 24000;
- } else if (codec_id_ == ACMCodecDB::kG722_1_32) {
- operational_rate_ = 32000;
- } else {
- operational_rate_ = -1;
- }
- return;
-}
-
-ACMG722_1::~ACMG722_1() {
- if (encoder_inst_ptr_ != NULL) {
- delete encoder_inst_ptr_;
- encoder_inst_ptr_ = NULL;
- }
- if (encoder_inst_ptr_right_ != NULL) {
- delete encoder_inst_ptr_right_;
- encoder_inst_ptr_right_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- delete decoder_inst_ptr_;
- decoder_inst_ptr_ = NULL;
- }
-
- switch (operational_rate_) {
- case 16000: {
- encoder_inst16_ptr_ = NULL;
- encoder_inst16_ptr_right_ = NULL;
- decoder_inst16_ptr_ = NULL;
- break;
- }
- case 24000: {
- encoder_inst24_ptr_ = NULL;
- encoder_inst24_ptr_right_ = NULL;
- decoder_inst24_ptr_ = NULL;
- break;
- }
- case 32000: {
- encoder_inst32_ptr_ = NULL;
- encoder_inst32_ptr_right_ = NULL;
- decoder_inst32_ptr_ = NULL;
- break;
- }
- default: {
- break;
- }
- }
- return;
-}
-
-int16_t ACMG722_1::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- int16_t left_channel[320];
- int16_t right_channel[320];
- int16_t len_in_bytes;
- int16_t out_bits[160];
-
- // If stereo, split input signal in left and right channel before encoding
- if (num_channels_ == 2) {
- for (int i = 0, j = 0; i < frame_len_smpl_ * 2; i += 2, j++) {
- left_channel[j] = in_audio_[in_audio_ix_read_ + i];
- right_channel[j] = in_audio_[in_audio_ix_read_ + i + 1];
- }
- } else {
- memcpy(left_channel, &in_audio_[in_audio_ix_read_], 320);
- }
-
- switch (operational_rate_) {
- case 16000: {
- len_in_bytes = WebRtcG7221_Encode16(encoder_inst16_ptr_, left_channel,
- 320, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221_Encode16(encoder_inst16_ptr_right_,
- right_channel, 320,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- case 24000: {
- len_in_bytes = WebRtcG7221_Encode24(encoder_inst24_ptr_, left_channel,
- 320, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221_Encode24(encoder_inst24_ptr_right_,
- right_channel, 320,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- case 32000: {
- len_in_bytes = WebRtcG7221_Encode32(encoder_inst32_ptr_, left_channel,
- 320, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221_Encode32(encoder_inst32_ptr_right_,
- right_channel, 320,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitEncode: Wrong rate for G722_1.");
- return -1;
- }
- }
- memcpy(bitstream, out_bits, len_in_bytes);
- *bitstream_len_byte = len_in_bytes;
-
- // increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer
- in_audio_ix_read_ += 320 * num_channels_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMG722_1::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMG722_1::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
- int16_t ret;
-
- switch (operational_rate_) {
- case 16000: {
- ret = WebRtcG7221_EncoderInit16(encoder_inst16_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221_EncoderInit16(encoder_inst16_ptr_);
- }
- case 24000: {
- ret = WebRtcG7221_EncoderInit24(encoder_inst24_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221_EncoderInit24(encoder_inst24_ptr_);
- }
- case 32000: {
- ret = WebRtcG7221_EncoderInit32(encoder_inst32_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221_EncoderInit32(encoder_inst32_ptr_);
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding,
- unique_id_, "InternalInitEncoder: Wrong rate for G722_1.");
- return -1;
- }
- }
-}
-
-int16_t ACMG722_1::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- switch (operational_rate_) {
- case 16000: {
- return WebRtcG7221_DecoderInit16(decoder_inst16_ptr_);
- }
- case 24000: {
- return WebRtcG7221_DecoderInit24(decoder_inst24_ptr_);
- }
- case 32000: {
- return WebRtcG7221_DecoderInit32(decoder_inst32_ptr_);
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: Wrong rate for G722_1.");
- return -1;
- }
- }
-}
-
-int32_t ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- // Todo:
- // log error
- return -1;
- }
- // NetEq has an array of pointers to WebRtcNetEQ_CodecDef.
- // Get an entry of that array (neteq wrapper will allocate memory)
- // by calling "netEq->CodecDef", where "NETEQ_CODEC_G722_1_XX" would
- // be the index of the entry.
- // Fill up the given structure by calling
- // "SET_CODEC_PAR" & "SET_G722_1_XX_FUNCTION."
- // Then return the structure back to NetEQ to add the codec to it's
- // database.
- switch (operational_rate_) {
- case 16000: {
- SET_CODEC_PAR((codec_def), kDecoderG722_1_16, codec_inst.pltype,
- decoder_inst16_ptr_, 16000);
- SET_G722_1_16_FUNCTIONS((codec_def));
- break;
- }
- case 24000: {
- SET_CODEC_PAR((codec_def), kDecoderG722_1_24, codec_inst.pltype,
- decoder_inst24_ptr_, 16000);
- SET_G722_1_24_FUNCTIONS((codec_def));
- break;
- }
- case 32000: {
- SET_CODEC_PAR((codec_def), kDecoderG722_1_32, codec_inst.pltype,
- decoder_inst32_ptr_, 16000);
- SET_G722_1_32_FUNCTIONS((codec_def));
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CodecDef: Wrong rate for G722_1.");
- return -1;
- }
- }
- return 0;
-}
-
-ACMGenericCodec* ACMG722_1::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722_1::InternalCreateEncoder() {
- if ((encoder_inst_ptr_ == NULL) || (encoder_inst_ptr_right_ == NULL)) {
- return -1;
- }
- switch (operational_rate_) {
- case 16000: {
- WebRtcG7221_CreateEnc16(&encoder_inst16_ptr_);
- WebRtcG7221_CreateEnc16(&encoder_inst16_ptr_right_);
- break;
- }
- case 24000: {
- WebRtcG7221_CreateEnc24(&encoder_inst24_ptr_);
- WebRtcG7221_CreateEnc24(&encoder_inst24_ptr_right_);
- break;
- }
- case 32000: {
- WebRtcG7221_CreateEnc32(&encoder_inst32_ptr_);
- WebRtcG7221_CreateEnc32(&encoder_inst32_ptr_right_);
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: Wrong rate for G722_1.");
- return -1;
- }
- }
- return 0;
-}
-
-void ACMG722_1::DestructEncoderSafe() {
- encoder_exist_ = false;
- encoder_initialized_ = false;
- if (encoder_inst_ptr_ != NULL) {
- delete encoder_inst_ptr_;
- encoder_inst_ptr_ = NULL;
- }
- if (encoder_inst_ptr_right_ != NULL) {
- delete encoder_inst_ptr_right_;
- encoder_inst_ptr_right_ = NULL;
- }
- encoder_inst16_ptr_ = NULL;
- encoder_inst24_ptr_ = NULL;
- encoder_inst32_ptr_ = NULL;
-}
-
-int16_t ACMG722_1::InternalCreateDecoder() {
- if (decoder_inst_ptr_ == NULL) {
- return -1;
- }
- switch (operational_rate_) {
- case 16000: {
- WebRtcG7221_CreateDec16(&decoder_inst16_ptr_);
- break;
- }
- case 24000: {
- WebRtcG7221_CreateDec24(&decoder_inst24_ptr_);
- break;
- }
- case 32000: {
- WebRtcG7221_CreateDec32(&decoder_inst32_ptr_);
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateDecoder: Wrong rate for G722_1.");
- return -1;
- }
- }
- return 0;
-}
-
-void ACMG722_1::DestructDecoderSafe() {
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (decoder_inst_ptr_ != NULL) {
- delete decoder_inst_ptr_;
- decoder_inst_ptr_ = NULL;
- }
- decoder_inst16_ptr_ = NULL;
- decoder_inst24_ptr_ = NULL;
- decoder_inst32_ptr_ = NULL;
-}
-
-void ACMG722_1::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- delete ptr_inst;
- }
- return;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.h
deleted file mode 100644
index 8ea66742c97..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221.h
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct G722_1_16_encinst_t_;
-struct G722_1_16_decinst_t_;
-struct G722_1_24_encinst_t_;
-struct G722_1_24_decinst_t_;
-struct G722_1_32_encinst_t_;
-struct G722_1_32_decinst_t_;
-struct G722_1_Inst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMG722_1: public ACMGenericCodec {
- public:
- explicit ACMG722_1(int16_t codec_id);
- ~ACMG722_1();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio, int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(void* ptr_inst);
-
- int32_t operational_rate_;
-
- G722_1_Inst_t_* encoder_inst_ptr_;
- G722_1_Inst_t_* encoder_inst_ptr_right_; // Used in stereo mode
- G722_1_Inst_t_* decoder_inst_ptr_;
-
- // Only one set of these pointer is valid at any instance
- G722_1_16_encinst_t_* encoder_inst16_ptr_;
- G722_1_16_encinst_t_* encoder_inst16_ptr_right_;
- G722_1_24_encinst_t_* encoder_inst24_ptr_;
- G722_1_24_encinst_t_* encoder_inst24_ptr_right_;
- G722_1_32_encinst_t_* encoder_inst32_ptr_;
- G722_1_32_encinst_t_* encoder_inst32_ptr_right_;
-
- // Only one of these pointer is valid at any instance
- G722_1_16_decinst_t_* decoder_inst16_ptr_;
- G722_1_24_decinst_t_* decoder_inst24_ptr_;
- G722_1_32_decinst_t_* decoder_inst32_ptr_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.cc
deleted file mode 100644
index 96caba0a08f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.cc
+++ /dev/null
@@ -1,510 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_g7221c.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_G722_1C
-// NOTE! G.722.1C is not included in the open-source package. The following
-// interface file is needed:
-//
-// /modules/audio_coding/codecs/g7221c/main/interface/g7221c_interface.h
-//
-// The API in the header file should match the one below.
-//
-
-// int16_t WebRtcG7221C_CreateEnc24(G722_1C_24_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_CreateEnc32(G722_1C_32_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_CreateEnc48(G722_1C_48_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_CreateDec24(G722_1C_24_decinst_t_** dec_inst);
-// int16_t WebRtcG7221C_CreateDec32(G722_1C_32_decinst_t_** dec_inst);
-// int16_t WebRtcG7221C_CreateDec48(G722_1C_48_decinst_t_** dec_inst);
-//
-// int16_t WebRtcG7221C_FreeEnc24(G722_1C_24_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_FreeEnc32(G722_1C_32_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_FreeEnc48(G722_1C_48_encinst_t_** enc_inst);
-// int16_t WebRtcG7221C_FreeDec24(G722_1C_24_decinst_t_** dec_inst);
-// int16_t WebRtcG7221C_FreeDec32(G722_1C_32_decinst_t_** dec_inst);
-// int16_t WebRtcG7221C_FreeDec48(G722_1C_48_decinst_t_** dec_inst);
-//
-// int16_t WebRtcG7221C_EncoderInit24(G722_1C_24_encinst_t_* enc_inst);
-// int16_t WebRtcG7221C_EncoderInit32(G722_1C_32_encinst_t_* enc_inst);
-// int16_t WebRtcG7221C_EncoderInit48(G722_1C_48_encinst_t_* enc_inst);
-// int16_t WebRtcG7221C_DecoderInit24(G722_1C_24_decinst_t_* dec_inst);
-// int16_t WebRtcG7221C_DecoderInit32(G722_1C_32_decinst_t_* dec_inst);
-// int16_t WebRtcG7221C_DecoderInit48(G722_1C_48_decinst_t_* dec_inst);
-//
-// int16_t WebRtcG7221C_Encode24(G722_1C_24_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221C_Encode32(G722_1C_32_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221C_Encode48(G722_1C_48_encinst_t_* enc_inst,
-// int16_t* input,
-// int16_t len,
-// int16_t* output);
-//
-// int16_t WebRtcG7221C_Decode24(G722_1C_24_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221C_Decode32(G722_1C_32_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-// int16_t WebRtcG7221C_Decode48(G722_1C_48_decinst_t_* dec_inst,
-// int16_t* bitstream,
-// int16_t len,
-// int16_t* output);
-//
-// int16_t WebRtcG7221C_DecodePlc24(G722_1C_24_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-// int16_t WebRtcG7221C_DecodePlc32(G722_1C_32_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-// int16_t WebRtcG7221C_DecodePlc48(G722_1C_48_decinst_t_* dec_inst,
-// int16_t* output,
-// int16_t nr_lost_frames);
-#include "g7221c_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_G722_1C
-
-ACMG722_1C::ACMG722_1C(int16_t /* codec_id */)
- : operational_rate_(-1),
- encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL),
- encoder_inst24_ptr_(NULL),
- encoder_inst24_ptr_right_(NULL),
- encoder_inst32_ptr_(NULL),
- encoder_inst32_ptr_right_(NULL),
- encoder_inst48_ptr_(NULL),
- encoder_inst48_ptr_right_(NULL),
- decoder_inst24_ptr_(NULL),
- decoder_inst32_ptr_(NULL),
- decoder_inst48_ptr_(NULL) {
- return;
-}
-
-ACMG722_1C::~ACMG722_1C() {
- return;
-}
-
-int16_t ACMG722_1C::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMG722_1C::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMG722_1C::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMG722_1C::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMG722_1C::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722_1C::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMG722_1C::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMG722_1C::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMG722_1C::DestructDecoderSafe() {
- return;
-}
-
-void ACMG722_1C::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-#else //===================== Actual Implementation =======================
-ACMG722_1C::ACMG722_1C(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- encoder_inst_ptr_right_(NULL),
- decoder_inst_ptr_(NULL),
- encoder_inst24_ptr_(NULL),
- encoder_inst24_ptr_right_(NULL),
- encoder_inst32_ptr_(NULL),
- encoder_inst32_ptr_right_(NULL),
- encoder_inst48_ptr_(NULL),
- encoder_inst48_ptr_right_(NULL),
- decoder_inst24_ptr_(NULL),
- decoder_inst32_ptr_(NULL),
- decoder_inst48_ptr_(NULL) {
- codec_id_ = codec_id;
- if (codec_id_ == ACMCodecDB::kG722_1C_24) {
- operational_rate_ = 24000;
- } else if (codec_id_ == ACMCodecDB::kG722_1C_32) {
- operational_rate_ = 32000;
- } else if (codec_id_ == ACMCodecDB::kG722_1C_48) {
- operational_rate_ = 48000;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Wrong codec id for G722_1c.");
- operational_rate_ = -1;
- }
- return;
-}
-
-ACMG722_1C::~ACMG722_1C() {
- if (encoder_inst_ptr_ != NULL) {
- delete encoder_inst_ptr_;
- encoder_inst_ptr_ = NULL;
- }
- if (encoder_inst_ptr_right_ != NULL) {
- delete encoder_inst_ptr_right_;
- encoder_inst_ptr_right_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- delete decoder_inst_ptr_;
- decoder_inst_ptr_ = NULL;
- }
-
- switch (operational_rate_) {
- case 24000: {
- encoder_inst24_ptr_ = NULL;
- encoder_inst24_ptr_right_ = NULL;
- decoder_inst24_ptr_ = NULL;
- break;
- }
- case 32000: {
- encoder_inst32_ptr_ = NULL;
- encoder_inst32_ptr_right_ = NULL;
- decoder_inst32_ptr_ = NULL;
- break;
- }
- case 48000: {
- encoder_inst48_ptr_ = NULL;
- encoder_inst48_ptr_right_ = NULL;
- decoder_inst48_ptr_ = NULL;
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Wrong rate for G722_1c.");
- break;
- }
- }
- return;
-}
-
-int16_t ACMG722_1C::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- int16_t left_channel[640];
- int16_t right_channel[640];
- int16_t len_in_bytes;
- int16_t out_bits[240];
-
- // If stereo, split input signal in left and right channel before encoding
- if (num_channels_ == 2) {
- for (int i = 0, j = 0; i < frame_len_smpl_ * 2; i += 2, j++) {
- left_channel[j] = in_audio_[in_audio_ix_read_ + i];
- right_channel[j] = in_audio_[in_audio_ix_read_ + i + 1];
- }
- } else {
- memcpy(left_channel, &in_audio_[in_audio_ix_read_], 640);
- }
-
- switch (operational_rate_) {
- case 24000: {
- len_in_bytes = WebRtcG7221C_Encode24(encoder_inst24_ptr_, left_channel,
- 640, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221C_Encode24(encoder_inst24_ptr_right_,
- right_channel, 640,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- case 32000: {
- len_in_bytes = WebRtcG7221C_Encode32(encoder_inst32_ptr_, left_channel,
- 640, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221C_Encode32(encoder_inst32_ptr_right_,
- right_channel, 640,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- case 48000: {
- len_in_bytes = WebRtcG7221C_Encode48(encoder_inst48_ptr_, left_channel,
- 640, &out_bits[0]);
- if (num_channels_ == 2) {
- len_in_bytes += WebRtcG7221C_Encode48(encoder_inst48_ptr_right_,
- right_channel, 640,
- &out_bits[len_in_bytes / 2]);
- }
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: Wrong rate for G722_1c.");
- return -1;
- }
- }
-
- memcpy(bitstream, out_bits, len_in_bytes);
- *bitstream_len_byte = len_in_bytes;
-
- // increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer
- in_audio_ix_read_ += 640 * num_channels_;
-
- return *bitstream_len_byte;
-}
-
-int16_t ACMG722_1C::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMG722_1C::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
- int16_t ret;
-
- switch (operational_rate_) {
- case 24000: {
- ret = WebRtcG7221C_EncoderInit24(encoder_inst24_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221C_EncoderInit24(encoder_inst24_ptr_);
- }
- case 32000: {
- ret = WebRtcG7221C_EncoderInit32(encoder_inst32_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221C_EncoderInit32(encoder_inst32_ptr_);
- }
- case 48000: {
- ret = WebRtcG7221C_EncoderInit48(encoder_inst48_ptr_right_);
- if (ret < 0) {
- return ret;
- }
- return WebRtcG7221C_EncoderInit48(encoder_inst48_ptr_);
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitEncode: Wrong rate for G722_1c.");
- return -1;
- }
- }
-}
-
-int16_t ACMG722_1C::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- switch (operational_rate_) {
- case 24000: {
- return WebRtcG7221C_DecoderInit24(decoder_inst24_ptr_);
- }
- case 32000: {
- return WebRtcG7221C_DecoderInit32(decoder_inst32_ptr_);
- }
- case 48000: {
- return WebRtcG7221C_DecoderInit48(decoder_inst48_ptr_);
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: Wrong rate for G722_1c.");
- return -1;
- }
- }
-}
-
-int32_t ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CodeDef: decoder not initialized for G722_1c");
- return -1;
- }
- // NetEq has an array of pointers to WebRtcNetEQ_CodecDef.
- // get an entry of that array (neteq wrapper will allocate memory)
- // by calling "netEq->CodecDef", where "NETEQ_CODEC_G722_1_XX" would
- // be the index of the entry.
- // Fill up the given structure by calling
- // "SET_CODEC_PAR" & "SET_G722_1_XX_FUNCTION."
- // Then return the structure back to NetEQ to add the codec to it's
- // database.
- switch (operational_rate_) {
- case 24000: {
- SET_CODEC_PAR((codec_def), kDecoderG722_1C_24, codec_inst.pltype,
- decoder_inst24_ptr_, 32000);
- SET_G722_1C_24_FUNCTIONS((codec_def));
- break;
- }
- case 32000: {
- SET_CODEC_PAR((codec_def), kDecoderG722_1C_32, codec_inst.pltype,
- decoder_inst32_ptr_, 32000);
- SET_G722_1C_32_FUNCTIONS((codec_def));
- break;
- }
- case 48000: {
- SET_CODEC_PAR((codec_def), kDecoderG722_1C_32, codec_inst.pltype,
- decoder_inst48_ptr_, 32000);
- SET_G722_1C_48_FUNCTIONS((codec_def));
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CodeDef: Wrong rate for G722_1c.");
- return -1;
- }
- }
- return 0;
-}
-
-ACMGenericCodec*
-ACMG722_1C::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG722_1C::InternalCreateEncoder() {
- if ((encoder_inst_ptr_ == NULL) || (encoder_inst_ptr_right_ == NULL)) {
- return -1;
- }
- switch (operational_rate_) {
- case 24000: {
- WebRtcG7221C_CreateEnc24(&encoder_inst24_ptr_);
- WebRtcG7221C_CreateEnc24(&encoder_inst24_ptr_right_);
- break;
- }
- case 32000: {
- WebRtcG7221C_CreateEnc32(&encoder_inst32_ptr_);
- WebRtcG7221C_CreateEnc32(&encoder_inst32_ptr_right_);
- break;
- }
- case 48000: {
- WebRtcG7221C_CreateEnc48(&encoder_inst48_ptr_);
- WebRtcG7221C_CreateEnc48(&encoder_inst48_ptr_right_);
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: Wrong rate for G722_1c.");
- return -1;
- }
- }
- return 0;
-}
-
-void ACMG722_1C::DestructEncoderSafe() {
- encoder_exist_ = false;
- encoder_initialized_ = false;
- if (encoder_inst_ptr_ != NULL) {
- delete encoder_inst_ptr_;
- encoder_inst_ptr_ = NULL;
- }
- if (encoder_inst_ptr_right_ != NULL) {
- delete encoder_inst_ptr_right_;
- encoder_inst_ptr_right_ = NULL;
- }
- encoder_inst24_ptr_ = NULL;
- encoder_inst32_ptr_ = NULL;
- encoder_inst48_ptr_ = NULL;
-}
-
-int16_t ACMG722_1C::InternalCreateDecoder() {
- if (decoder_inst_ptr_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: cannot create decoder");
- return -1;
- }
- switch (operational_rate_) {
- case 24000: {
- WebRtcG7221C_CreateDec24(&decoder_inst24_ptr_);
- break;
- }
- case 32000: {
- WebRtcG7221C_CreateDec32(&decoder_inst32_ptr_);
- break;
- }
- case 48000: {
- WebRtcG7221C_CreateDec48(&decoder_inst48_ptr_);
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: Wrong rate for G722_1c.");
- return -1;
- }
- }
- return 0;
-}
-
-void ACMG722_1C::DestructDecoderSafe() {
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (decoder_inst_ptr_ != NULL) {
- delete decoder_inst_ptr_;
- decoder_inst_ptr_ = NULL;
- }
- decoder_inst24_ptr_ = NULL;
- decoder_inst32_ptr_ = NULL;
- decoder_inst48_ptr_ = NULL;
-}
-
-void ACMG722_1C::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- delete ptr_inst;
- }
- return;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.h
deleted file mode 100644
index d8875aa2fbf..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7221c.h
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221C_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221C_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct G722_1C_24_encinst_t_;
-struct G722_1C_24_decinst_t_;
-struct G722_1C_32_encinst_t_;
-struct G722_1C_32_decinst_t_;
-struct G722_1C_48_encinst_t_;
-struct G722_1C_48_decinst_t_;
-struct G722_1_Inst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMG722_1C : public ACMGenericCodec {
- public:
- explicit ACMG722_1C(int16_t codec_id);
- ~ACMG722_1C();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(
- uint8_t* bitstream,
- int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(
- WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(
- WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(
- uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(
- WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(
- void* ptr_inst);
-
- int32_t operational_rate_;
-
- G722_1_Inst_t_* encoder_inst_ptr_;
- G722_1_Inst_t_* encoder_inst_ptr_right_; // Used in stereo mode
- G722_1_Inst_t_* decoder_inst_ptr_;
-
- // Only one set of these pointer is valid at any instance
- G722_1C_24_encinst_t_* encoder_inst24_ptr_;
- G722_1C_24_encinst_t_* encoder_inst24_ptr_right_;
- G722_1C_32_encinst_t_* encoder_inst32_ptr_;
- G722_1C_32_encinst_t_* encoder_inst32_ptr_right_;
- G722_1C_48_encinst_t_* encoder_inst48_ptr_;
- G722_1C_48_encinst_t_* encoder_inst48_ptr_right_;
-
- // Only one of these pointer is valid at any instance
- G722_1C_24_decinst_t_* decoder_inst24_ptr_;
- G722_1C_32_decinst_t_* decoder_inst32_ptr_;
- G722_1C_48_decinst_t_* decoder_inst48_ptr_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc;
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7221C_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.cc
deleted file mode 100644
index 406bb61e48d..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.cc
+++ /dev/null
@@ -1,366 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_g729.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_G729
-// NOTE! G.729 is not included in the open-source package. Modify this file
-// or your codec API to match the function calls and names of used G.729 API
-// file.
-#include "g729_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_G729
-
-ACMG729::ACMG729(int16_t /* codec_id */)
-: encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- return;
-}
-
-ACMG729::~ACMG729() {
- return;
-}
-
-int16_t ACMG729::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMG729::EnableDTX() {
- return -1;
-}
-
-int16_t ACMG729::DisableDTX() {
- return -1;
-}
-
-int32_t ACMG729::ReplaceInternalDTXSafe(
- const bool /*replace_internal_dtx */) {
- return -1;
-}
-
-int32_t ACMG729::IsInternalDTXReplacedSafe(
- bool* /* internal_dtx_replaced */) {
- return -1;
-}
-
-int16_t ACMG729::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMG729::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMG729::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMG729::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMG729::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG729::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMG729::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMG729::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMG729::DestructDecoderSafe() {
- return;
-}
-
-void ACMG729::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-#else //===================== Actual Implementation =======================
-ACMG729::ACMG729(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- codec_id_ = codec_id;
- has_internal_dtx_ = true;
- return;
-}
-
-ACMG729::~ACMG729() {
- if (encoder_inst_ptr_ != NULL) {
- // Delete encoder memory
- WebRtcG729_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- // Delete decoder memory
- WebRtcG729_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMG729::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- // Initialize before entering the loop
- int16_t num_encoded_samples = 0;
- int16_t tmp_len_byte = 0;
- int16_t vad_decision = 0;
- *bitstream_len_byte = 0;
- while (num_encoded_samples < frame_len_smpl_) {
- // Call G.729 encoder with pointer to encoder memory, input
- // audio, number of samples and bitsream
- tmp_len_byte = WebRtcG729_Encode(
- encoder_inst_ptr_, &in_audio_[in_audio_ix_read_], 80,
- (int16_t*)(&(bitstream[*bitstream_len_byte])));
-
- // increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer
- in_audio_ix_read_ += 80;
-
- // sanity check
- if (tmp_len_byte < 0) {
- // error has happened
- *bitstream_len_byte = 0;
- return -1;
- }
-
- // increment number of written bytes
- *bitstream_len_byte += tmp_len_byte;
- switch (tmp_len_byte) {
- case 0: {
- if (0 == num_encoded_samples) {
- // this is the first 10 ms in this packet and there is
- // no data generated, perhaps DTX is enabled and the
- // codec is not generating any bit-stream for this 10 ms.
- // we do not continue encoding this frame.
- return 0;
- }
- break;
- }
- case 2: {
- // check if G.729 internal DTX is enabled
- if (has_internal_dtx_ && dtx_enabled_) {
- vad_decision = 0;
- for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
- vad_label_[n] = vad_decision;
- }
- }
- // we got a SID and have to send out this packet no matter
- // how much audio we have encoded
- return *bitstream_len_byte;
- }
- case 10: {
- vad_decision = 1;
- // this is a valid length just continue encoding
- break;
- }
- default: {
- return -1;
- }
- }
-
- // update number of encoded samples
- num_encoded_samples += 80;
- }
-
- // update VAD decision vector
- if (has_internal_dtx_ && !vad_decision && dtx_enabled_) {
- for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
- vad_label_[n] = vad_decision;
- }
- }
-
- // done encoding, return number of encoded bytes
- return *bitstream_len_byte;
-}
-
-int16_t ACMG729::EnableDTX() {
- if (dtx_enabled_) {
- // DTX already enabled, do nothing
- return 0;
- } else if (encoder_exist_) {
- // Re-init the G.729 encoder to turn on DTX
- if (WebRtcG729_EncoderInit(encoder_inst_ptr_, 1) < 0) {
- return -1;
- }
- dtx_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-}
-
-int16_t ACMG729::DisableDTX() {
- if (!dtx_enabled_) {
- // DTX already dissabled, do nothing
- return 0;
- } else if (encoder_exist_) {
- // Re-init the G.729 decoder to turn off DTX
- if (WebRtcG729_EncoderInit(encoder_inst_ptr_, 0) < 0) {
- return -1;
- }
- dtx_enabled_ = false;
- return 0;
- } else {
- // encoder doesn't exists, therefore disabling is harmless
- return 0;
- }
-}
-
-int32_t ACMG729::ReplaceInternalDTXSafe(const bool replace_internal_dtx) {
- // This function is used to disable the G.729 built in DTX and use an
- // external instead.
-
- if (replace_internal_dtx == has_internal_dtx_) {
- // Make sure we keep the DTX/VAD setting if possible
- bool old_enable_dtx = dtx_enabled_;
- bool old_enable_vad = vad_enabled_;
- ACMVADMode old_mode = vad_mode_;
- if (replace_internal_dtx) {
- // Disable internal DTX before enabling external DTX
- DisableDTX();
- } else {
- // Disable external DTX before enabling internal
- ACMGenericCodec::DisableDTX();
- }
- has_internal_dtx_ = !replace_internal_dtx;
- int16_t status = SetVADSafe(old_enable_dtx, old_enable_vad, old_mode);
- // Check if VAD status has changed from inactive to active, or if error was
- // reported
- if (status == 1) {
- vad_enabled_ = true;
- return status;
- } else if (status < 0) {
- has_internal_dtx_ = replace_internal_dtx;
- return -1;
- }
- }
- return 0;
-}
-
-int32_t ACMG729::IsInternalDTXReplacedSafe(bool* internal_dtx_replaced) {
- // Get status of wether DTX is replaced or not
- *internal_dtx_replaced = !has_internal_dtx_;
- return 0;
-}
-
-int16_t ACMG729::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- // This function is not used. G.729 decoder is called from inside NetEQ
- return 0;
-}
-
-int16_t ACMG729::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- // Init G.729 encoder
- return WebRtcG729_EncoderInit(encoder_inst_ptr_,
- ((codec_params->enable_dtx) ? 1 : 0));
-}
-
-int16_t ACMG729::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // Init G.729 decoder
- return WebRtcG729_DecoderInit(decoder_inst_ptr_);
-}
-
-int32_t ACMG729::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- // Todo:
- // log error
- return -1;
- }
-
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_G729_FUNCTION."
- // Then call NetEQ to add the codec to it's
- // database.
- SET_CODEC_PAR((codec_def), kDecoderG729, codec_inst.pltype, decoder_inst_ptr_,
- 8000);
- SET_G729_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMG729::CreateInstance(void) {
- // Function not used
- return NULL;
-}
-
-int16_t ACMG729::InternalCreateEncoder() {
- // Create encoder memory
- return WebRtcG729_CreateEnc(&encoder_inst_ptr_);
-}
-
-void ACMG729::DestructEncoderSafe() {
- // Free encoder memory
- encoder_exist_ = false;
- encoder_initialized_ = false;
- if (encoder_inst_ptr_ != NULL) {
- WebRtcG729_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
-}
-
-int16_t ACMG729::InternalCreateDecoder() {
- // Create decoder memory
- return WebRtcG729_CreateDec(&decoder_inst_ptr_);
-}
-
-void ACMG729::DestructDecoderSafe() {
- // Free decoder memory
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (decoder_inst_ptr_ != NULL) {
- WebRtcG729_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
-}
-
-void ACMG729::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcG729_FreeEnc((G729_encinst_t_*) ptr_inst);
- }
- return;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.h
deleted file mode 100644
index 5cfff63b69a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g729.h
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct G729_encinst_t_;
-struct G729_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMG729 : public ACMGenericCodec {
- public:
- explicit ACMG729(int16_t codec_id);
- ~ACMG729();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(void* ptr_inst);
-
- int16_t EnableDTX();
-
- int16_t DisableDTX();
-
- int32_t ReplaceInternalDTXSafe(const bool replace_internal_dtx);
-
- int32_t IsInternalDTXReplacedSafe(bool* internal_dtx_replaced);
-
- G729_encinst_t_* encoder_inst_ptr_;
- G729_decinst_t_* decoder_inst_ptr_;
-
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.cc
deleted file mode 100644
index 0da6c99d21e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.cc
+++ /dev/null
@@ -1,349 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_g7291.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#ifdef WEBRTC_CODEC_G729_1
-// NOTE! G.729.1 is not included in the open-source package. Modify this file
-// or your codec API to match the function calls and names of used G.729.1 API
-// file.
-#include "g7291_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_G729_1
-
-ACMG729_1::ACMG729_1(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- my_rate_(32000),
- flag_8khz_(0),
- flag_g729_mode_(0) {
- return;
-}
-
-ACMG729_1::~ACMG729_1() {
- return;
-}
-
-int16_t ACMG729_1::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMG729_1::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMG729_1::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMG729_1::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMG729_1::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMG729_1::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG729_1::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMG729_1::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMG729_1::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMG729_1::DestructDecoderSafe() {
- return;
-}
-
-void ACMG729_1::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMG729_1::SetBitRateSafe(const int32_t /*rate*/) {
- return -1;
-}
-
-#else //===================== Actual Implementation =======================
-
-struct G729_1_inst_t_;
-
-ACMG729_1::ACMG729_1(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- my_rate_(32000), // Default rate.
- flag_8khz_(0),
- flag_g729_mode_(0) {
- // TODO(tlegrand): We should add codec_id as a input variable to the
- // constructor of ACMGenericCodec.
- codec_id_ = codec_id;
- return;
-}
-
-ACMG729_1::~ACMG729_1() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcG7291_Free(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcG7291_Free(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMG729_1::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
-
- // Initialize before entering the loop
- int16_t num_encoded_samples = 0;
- *bitstream_len_byte = 0;
-
- int16_t byte_length_frame = 0;
-
- // Derive number of 20ms frames per encoded packet.
- // [1,2,3] <=> [20,40,60]ms <=> [320,640,960] samples
- int16_t num_20ms_frames = (frame_len_smpl_ / 320);
- // Byte length for the frame. +1 is for rate information.
- byte_length_frame = my_rate_ / (8 * 50) * num_20ms_frames + (1 -
- flag_g729_mode_);
-
- // The following might be revised if we have G729.1 Annex C (support for DTX);
- do {
- *bitstream_len_byte = WebRtcG7291_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- (int16_t*) bitstream,
- my_rate_, num_20ms_frames);
-
- // increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer
- in_audio_ix_read_ += 160;
-
- // sanity check
- if (*bitstream_len_byte < 0) {
- // error has happened
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: Encode error for G729_1");
- *bitstream_len_byte = 0;
- return -1;
- }
-
- num_encoded_samples += 160;
- } while (*bitstream_len_byte == 0);
-
- // This criteria will change if we have Annex C.
- if (*bitstream_len_byte != byte_length_frame) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: Encode error for G729_1");
- *bitstream_len_byte = 0;
- return -1;
- }
-
- if (num_encoded_samples != frame_len_smpl_) {
- *bitstream_len_byte = 0;
- return -1;
- }
-
- return *bitstream_len_byte;
-}
-
-int16_t ACMG729_1::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMG729_1::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
- //set the bit rate and initialize
- my_rate_ = codec_params->codec_inst.rate;
- return SetBitRateSafe((uint32_t) my_rate_);
-}
-
-int16_t ACMG729_1::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- if (WebRtcG7291_DecoderInit(decoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: init decoder failed for G729_1");
- return -1;
- }
- return 0;
-}
-
-int32_t ACMG729_1::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CodeDef: Decoder uninitialized for G729_1");
- return -1;
- }
-
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_G729_FUNCTION."
- // Then call NetEQ to add the codec to it's
- // database.
- SET_CODEC_PAR((codec_def), kDecoderG729_1, codec_inst.pltype,
- decoder_inst_ptr_, 16000);
- SET_G729_1_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMG729_1::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMG729_1::InternalCreateEncoder() {
- if (WebRtcG7291_Create(&encoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: create encoder failed for G729_1");
- return -1;
- }
- return 0;
-}
-
-void ACMG729_1::DestructEncoderSafe() {
- encoder_exist_ = false;
- encoder_initialized_ = false;
- if (encoder_inst_ptr_ != NULL) {
- WebRtcG7291_Free(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
-}
-
-int16_t ACMG729_1::InternalCreateDecoder() {
- if (WebRtcG7291_Create(&decoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateDecoder: create decoder failed for G729_1");
- return -1;
- }
- return 0;
-}
-
-void ACMG729_1::DestructDecoderSafe() {
- decoder_exist_ = false;
- decoder_initialized_ = false;
- if (decoder_inst_ptr_ != NULL) {
- WebRtcG7291_Free(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
-}
-
-void ACMG729_1::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- // WebRtcG7291_Free((G729_1_inst_t*)ptrInst);
- }
- return;
-}
-
-int16_t ACMG729_1::SetBitRateSafe(const int32_t rate) {
- // allowed rates: { 8000, 12000, 14000, 16000, 18000, 20000,
- // 22000, 24000, 26000, 28000, 30000, 32000};
- // TODO(tlegrand): This check exists in one other place two. Should be
- // possible to reuse code.
- switch (rate) {
- case 8000: {
- my_rate_ = 8000;
- break;
- }
- case 12000: {
- my_rate_ = 12000;
- break;
- }
- case 14000: {
- my_rate_ = 14000;
- break;
- }
- case 16000: {
- my_rate_ = 16000;
- break;
- }
- case 18000: {
- my_rate_ = 18000;
- break;
- }
- case 20000: {
- my_rate_ = 20000;
- break;
- }
- case 22000: {
- my_rate_ = 22000;
- break;
- }
- case 24000: {
- my_rate_ = 24000;
- break;
- }
- case 26000: {
- my_rate_ = 26000;
- break;
- }
- case 28000: {
- my_rate_ = 28000;
- break;
- }
- case 30000: {
- my_rate_ = 30000;
- break;
- }
- case 32000: {
- my_rate_ = 32000;
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: Invalid rate G729_1");
- return -1;
- }
- }
-
- // Re-init with new rate
- if (WebRtcG7291_EncoderInit(encoder_inst_ptr_, my_rate_, flag_8khz_,
- flag_g729_mode_) >= 0) {
- encoder_params_.codec_inst.rate = my_rate_;
- return 0;
- } else {
- return -1;
- }
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.h
deleted file mode 100644
index bac7faf8368..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_g7291.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7291_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7291_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct G729_1_inst_t_;
-struct G729_1_inst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMG729_1 : public ACMGenericCodec {
- public:
- explicit ACMG729_1(int16_t codec_id);
- ~ACMG729_1();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(void* ptr_inst);
-
- int16_t SetBitRateSafe(const int32_t rate);
-
- G729_1_inst_t_* encoder_inst_ptr_;
- G729_1_inst_t_* decoder_inst_ptr_;
-
- uint16_t my_rate_;
- int16_t flag_8khz_;
- int16_t flag_g729_mode_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G7291_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc
deleted file mode 100644
index 4e53b873a1f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.cc
+++ /dev/null
@@ -1,1263 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-#include <assert.h>
-#include <string.h>
-
-#include "webrtc/common_audio/vad/include/webrtc_vad.h"
-#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-// Enum for CNG
-enum {
- kMaxPLCParamsCNG = WEBRTC_CNG_MAX_LPC_ORDER,
- kNewCNGNumPLCParams = 8
-};
-
-// Interval for sending new CNG parameters (SID frames) is 100 msec.
-enum {
- kCngSidIntervalMsec = 100
-};
-
-// We set some of the variables to invalid values as a check point
-// if a proper initialization has happened. Another approach is
-// to initialize to a default codec that we are sure is always included.
-ACMGenericCodec::ACMGenericCodec()
- : in_audio_ix_write_(0),
- in_audio_ix_read_(0),
- in_timestamp_ix_write_(0),
- in_audio_(NULL),
- in_timestamp_(NULL),
- frame_len_smpl_(-1), // invalid value
- num_channels_(1),
- codec_id_(-1), // invalid value
- num_missed_samples_(0),
- encoder_exist_(false),
- decoder_exist_(false),
- encoder_initialized_(false),
- decoder_initialized_(false),
- registered_in_neteq_(false),
- has_internal_dtx_(false),
- ptr_vad_inst_(NULL),
- vad_enabled_(false),
- vad_mode_(VADNormal),
- dtx_enabled_(false),
- ptr_dtx_inst_(NULL),
- num_lpc_params_(kNewCNGNumPLCParams),
- sent_cn_previous_(false),
- is_master_(true),
- prev_frame_cng_(0),
- neteq_decode_lock_(NULL),
- codec_wrapper_lock_(*RWLockWrapper::CreateRWLock()),
- last_encoded_timestamp_(0),
- last_timestamp_(0xD87F3F9F),
- is_audio_buff_fresh_(true),
- unique_id_(0) {
- // Initialize VAD vector.
- for (int i = 0; i < MAX_FRAME_SIZE_10MSEC; i++) {
- vad_label_[i] = 0;
- }
- // Nullify memory for encoder and decoder, and set payload type to an
- // invalid value.
- memset(&encoder_params_, 0, sizeof(WebRtcACMCodecParams));
- encoder_params_.codec_inst.pltype = -1;
- memset(&decoder_params_, 0, sizeof(WebRtcACMCodecParams));
- decoder_params_.codec_inst.pltype = -1;
-}
-
-ACMGenericCodec::~ACMGenericCodec() {
- // Check all the members which are pointers, and if they are not NULL
- // delete/free them.
- if (ptr_vad_inst_ != NULL) {
- WebRtcVad_Free(ptr_vad_inst_);
- ptr_vad_inst_ = NULL;
- }
- if (in_audio_ != NULL) {
- delete[] in_audio_;
- in_audio_ = NULL;
- }
- if (in_timestamp_ != NULL) {
- delete[] in_timestamp_;
- in_timestamp_ = NULL;
- }
- if (ptr_dtx_inst_ != NULL) {
- WebRtcCng_FreeEnc(ptr_dtx_inst_);
- ptr_dtx_inst_ = NULL;
- }
- delete &codec_wrapper_lock_;
-}
-
-int32_t ACMGenericCodec::Add10MsData(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length_smpl,
- const uint8_t audio_channel) {
- WriteLockScoped wl(codec_wrapper_lock_);
- return Add10MsDataSafe(timestamp, data, length_smpl, audio_channel);
-}
-
-int32_t ACMGenericCodec::Add10MsDataSafe(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length_smpl,
- const uint8_t audio_channel) {
- // The codec expects to get data in correct sampling rate. Get the sampling
- // frequency of the codec.
- uint16_t plfreq_hz;
- if (EncoderSampFreq(plfreq_hz) < 0) {
- return -1;
- }
-
- // Sanity check to make sure the length of the input corresponds to 10 ms.
- if ((plfreq_hz / 100) != length_smpl) {
- // This is not 10 ms of audio, given the sampling frequency of the codec.
- return -1;
- }
-
- if (last_timestamp_ == timestamp) {
- // Same timestamp as the last time, overwrite.
- if ((in_audio_ix_write_ >= length_smpl * audio_channel) &&
- (in_timestamp_ix_write_ > 0)) {
- in_audio_ix_write_ -= length_smpl * audio_channel;
- in_timestamp_ix_write_--;
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, unique_id_,
- "Adding 10ms with previous timestamp, overwriting the "
- "previous 10ms");
- } else {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, unique_id_,
- "Adding 10ms with previous timestamp, this will sound bad");
- }
- }
-
- last_timestamp_ = timestamp;
-
- // If the data exceeds the buffer size, we throw away the oldest data and
- // add the newly received 10 msec at the end.
- if ((in_audio_ix_write_ + length_smpl * audio_channel) >
- AUDIO_BUFFER_SIZE_W16) {
- // Get the number of samples to be overwritten.
- int16_t missed_samples = in_audio_ix_write_ + length_smpl * audio_channel -
- AUDIO_BUFFER_SIZE_W16;
-
- // Move the data (overwrite the old data).
- memmove(in_audio_, in_audio_ + missed_samples,
- (AUDIO_BUFFER_SIZE_W16 - length_smpl * audio_channel) *
- sizeof(int16_t));
-
- // Copy the new data.
- memcpy(in_audio_ + (AUDIO_BUFFER_SIZE_W16 - length_smpl * audio_channel),
- data, length_smpl * audio_channel * sizeof(int16_t));
-
- // Get the number of 10 ms blocks which are overwritten.
- int16_t missed_10ms_blocks =static_cast<int16_t>(
- (missed_samples / audio_channel * 100) / plfreq_hz);
-
- // Move the timestamps.
- memmove(in_timestamp_, in_timestamp_ + missed_10ms_blocks,
- (in_timestamp_ix_write_ - missed_10ms_blocks) * sizeof(uint32_t));
- in_timestamp_ix_write_ -= missed_10ms_blocks;
- assert(in_timestamp_ix_write_ >= 0);
- in_timestamp_[in_timestamp_ix_write_] = timestamp;
- in_timestamp_ix_write_++;
-
- // Buffer is full.
- in_audio_ix_write_ = AUDIO_BUFFER_SIZE_W16;
- IncreaseNoMissedSamples(missed_samples);
- is_audio_buff_fresh_ = false;
- return -missed_samples;
- }
-
- // Store the input data in our data buffer.
- memcpy(in_audio_ + in_audio_ix_write_, data,
- length_smpl * audio_channel * sizeof(int16_t));
- in_audio_ix_write_ += length_smpl * audio_channel;
-
- assert(in_timestamp_ix_write_ < TIMESTAMP_BUFFER_SIZE_W32);
- assert(in_timestamp_ix_write_ >= 0);
-
- in_timestamp_[in_timestamp_ix_write_] = timestamp;
- in_timestamp_ix_write_++;
- is_audio_buff_fresh_ = false;
- return 0;
-}
-
-bool ACMGenericCodec::HasFrameToEncode() const {
- ReadLockScoped lockCodec(codec_wrapper_lock_);
- if (in_audio_ix_write_ < frame_len_smpl_ * num_channels_)
- return false;
- return true;
-}
-
-int16_t ACMGenericCodec::Encode(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- uint32_t* timestamp,
- WebRtcACMEncodingType* encoding_type) {
- if (!HasFrameToEncode()) {
- // There is not enough audio
- *timestamp = 0;
- *bitstream_len_byte = 0;
- // Doesn't really matter what this parameter set to
- *encoding_type = kNoEncoding;
- return 0;
- }
- WriteLockScoped lockCodec(codec_wrapper_lock_);
- ReadLockScoped lockNetEq(*neteq_decode_lock_);
-
- // Not all codecs accept the whole frame to be pushed into encoder at once.
- // Some codecs needs to be feed with a specific number of samples different
- // from the frame size. If this is the case, |myBasicCodingBlockSmpl| will
- // report a number different from 0, and we will loop over calls to encoder
- // further down, until we have encode a complete frame.
- const int16_t my_basic_coding_block_smpl =
- ACMCodecDB::BasicCodingBlock(codec_id_);
- if (my_basic_coding_block_smpl < 0 || !encoder_initialized_ ||
- !encoder_exist_) {
- // This should not happen, but in case it does, report no encoding done.
- *timestamp = 0;
- *bitstream_len_byte = 0;
- *encoding_type = kNoEncoding;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EncodeSafe: error, basic coding sample block is negative");
- return -1;
- }
- // This makes the internal encoder read from the beginning of the buffer.
- in_audio_ix_read_ = 0;
- *timestamp = in_timestamp_[0];
-
- // Process the audio through VAD. The function will set |_vad_labels|.
- // If VAD is disabled all entries in |_vad_labels| are set to ONE (active).
- int16_t status = 0;
- int16_t dtx_processed_samples = 0;
- status = ProcessFrameVADDTX(bitstream, bitstream_len_byte,
- &dtx_processed_samples);
- if (status < 0) {
- *timestamp = 0;
- *bitstream_len_byte = 0;
- *encoding_type = kNoEncoding;
- } else {
- if (dtx_processed_samples > 0) {
- // Dtx have processed some samples, and even if a bit-stream is generated
- // we should not do any encoding (normally there won't be enough data).
-
- // Setting the following makes sure that the move of audio data and
- // timestamps done correctly.
- in_audio_ix_read_ = dtx_processed_samples;
- // This will let the owner of ACMGenericCodec to know that the
- // generated bit-stream is DTX to use correct payload type.
- uint16_t samp_freq_hz;
- EncoderSampFreq(samp_freq_hz);
- if (samp_freq_hz == 8000) {
- *encoding_type = kPassiveDTXNB;
- } else if (samp_freq_hz == 16000) {
- *encoding_type = kPassiveDTXWB;
- } else if (samp_freq_hz == 32000) {
- *encoding_type = kPassiveDTXSWB;
- } else if (samp_freq_hz == 48000) {
- *encoding_type = kPassiveDTXFB;
- } else {
- status = -1;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EncodeSafe: Wrong sampling frequency for DTX.");
- }
-
- // Transport empty frame if we have an empty bitstream.
- if ((*bitstream_len_byte == 0) &&
- (sent_cn_previous_ ||
- ((in_audio_ix_write_ - in_audio_ix_read_) <= 0))) {
- // Makes sure we transmit an empty frame.
- *bitstream_len_byte = 1;
- *encoding_type = kNoEncoding;
- }
- sent_cn_previous_ = true;
- } else {
- // We should encode the audio frame. Either VAD and/or DTX is off, or the
- // audio was considered "active".
-
- sent_cn_previous_ = false;
- if (my_basic_coding_block_smpl == 0) {
- // This codec can handle all allowed frame sizes as basic coding block.
- status = InternalEncode(bitstream, bitstream_len_byte);
- if (status < 0) {
- // TODO(tlegrand): Maybe reseting the encoder to be fresh for the next
- // frame.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding,
- unique_id_, "EncodeSafe: error in internal_encode");
- *bitstream_len_byte = 0;
- *encoding_type = kNoEncoding;
- }
- } else {
- // A basic-coding-block for this codec is defined so we loop over the
- // audio with the steps of the basic-coding-block.
- int16_t tmp_bitstream_len_byte;
-
- // Reset the variables which will be incremented in the loop.
- *bitstream_len_byte = 0;
- do {
- status = InternalEncode(&bitstream[*bitstream_len_byte],
- &tmp_bitstream_len_byte);
- *bitstream_len_byte += tmp_bitstream_len_byte;
-
- // Guard Against errors and too large payloads.
- if ((status < 0) || (*bitstream_len_byte > MAX_PAYLOAD_SIZE_BYTE)) {
- // Error has happened, and even if we are in the middle of a full
- // frame we have to exit. Before exiting, whatever bits are in the
- // buffer are probably corrupted, so we ignore them.
- *bitstream_len_byte = 0;
- *encoding_type = kNoEncoding;
- // We might have come here because of the second condition.
- status = -1;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding,
- unique_id_, "EncodeSafe: error in InternalEncode");
- // break from the loop
- break;
- }
- } while (in_audio_ix_read_ < frame_len_smpl_ * num_channels_);
- }
- if (status >= 0) {
- *encoding_type = (vad_label_[0] == 1) ? kActiveNormalEncoded :
- kPassiveNormalEncoded;
- // Transport empty frame if we have an empty bitstream.
- if ((*bitstream_len_byte == 0) &&
- ((in_audio_ix_write_ - in_audio_ix_read_) <= 0)) {
- // Makes sure we transmit an empty frame.
- *bitstream_len_byte = 1;
- *encoding_type = kNoEncoding;
- }
- }
- }
- }
-
- // Move the timestamp buffer according to the number of 10 ms blocks
- // which are read.
- uint16_t samp_freq_hz;
- EncoderSampFreq(samp_freq_hz);
- int16_t num_10ms_blocks = static_cast<int16_t>(
- (in_audio_ix_read_ / num_channels_ * 100) / samp_freq_hz);
- if (in_timestamp_ix_write_ > num_10ms_blocks) {
- memmove(in_timestamp_, in_timestamp_ + num_10ms_blocks,
- (in_timestamp_ix_write_ - num_10ms_blocks) * sizeof(int32_t));
- }
- in_timestamp_ix_write_ -= num_10ms_blocks;
- assert(in_timestamp_ix_write_ >= 0);
- // Remove encoded audio and move next audio to be encoded to the beginning
- // of the buffer. Accordingly, adjust the read and write indices.
- if (in_audio_ix_read_ < in_audio_ix_write_) {
- memmove(in_audio_, &in_audio_[in_audio_ix_read_],
- (in_audio_ix_write_ - in_audio_ix_read_) * sizeof(int16_t));
- }
- in_audio_ix_write_ -= in_audio_ix_read_;
- assert(in_timestamp_ix_write_ >= 0);
- in_audio_ix_read_ = 0;
- last_encoded_timestamp_ = *timestamp;
- return (status < 0) ? (-1) : (*bitstream_len_byte);
-}
-
-int16_t ACMGenericCodec::Decode(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) {
- WriteLockScoped wl(codec_wrapper_lock_);
- return DecodeSafe(bitstream, bitstream_len_byte, audio, audio_samples,
- speech_type);
-}
-
-bool ACMGenericCodec::EncoderInitialized() {
- ReadLockScoped rl(codec_wrapper_lock_);
- return encoder_initialized_;
-}
-
-bool ACMGenericCodec::DecoderInitialized() {
- ReadLockScoped rl(codec_wrapper_lock_);
- return decoder_initialized_;
-}
-
-int32_t ACMGenericCodec::RegisterInNetEq(ACMNetEQ* neteq,
- const CodecInst& codec_inst) {
- WebRtcNetEQ_CodecDef codec_def;
- WriteLockScoped wl(codec_wrapper_lock_);
-
- if (CodecDef(codec_def, codec_inst) < 0) {
- // Failed to register the decoder.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "RegisterInNetEq: error, failed to register");
- registered_in_neteq_ = false;
- return -1;
- } else {
- if (neteq->AddCodec(&codec_def, is_master_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "RegisterInNetEq: error, failed to add codec");
- registered_in_neteq_ = false;
- return -1;
- }
- // Succeeded registering the decoder.
- registered_in_neteq_ = true;
- return 0;
- }
-}
-
-int16_t ACMGenericCodec::EncoderParams(WebRtcACMCodecParams* enc_params) {
- ReadLockScoped rl(codec_wrapper_lock_);
- return EncoderParamsSafe(enc_params);
-}
-
-int16_t ACMGenericCodec::EncoderParamsSafe(WebRtcACMCodecParams* enc_params) {
- // Codec parameters are valid only if the encoder is initialized.
- if (encoder_initialized_) {
- int32_t current_rate;
- memcpy(enc_params, &encoder_params_, sizeof(WebRtcACMCodecParams));
- current_rate = enc_params->codec_inst.rate;
- CurrentRate(current_rate);
- enc_params->codec_inst.rate = current_rate;
- return 0;
- } else {
- enc_params->codec_inst.plname[0] = '\0';
- enc_params->codec_inst.pltype = -1;
- enc_params->codec_inst.pacsize = 0;
- enc_params->codec_inst.rate = 0;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EncoderParamsSafe: error, encoder not initialized");
- return -1;
- }
-}
-
-bool ACMGenericCodec::DecoderParams(WebRtcACMCodecParams* dec_params,
- const uint8_t payload_type) {
- ReadLockScoped rl(codec_wrapper_lock_);
- return DecoderParamsSafe(dec_params, payload_type);
-}
-
-bool ACMGenericCodec::DecoderParamsSafe(WebRtcACMCodecParams* dec_params,
- const uint8_t payload_type) {
- // Decoder parameters are valid only if decoder is initialized.
- if (decoder_initialized_) {
- if (payload_type == decoder_params_.codec_inst.pltype) {
- memcpy(dec_params, &decoder_params_, sizeof(WebRtcACMCodecParams));
- return true;
- }
- }
-
- dec_params->codec_inst.plname[0] = '\0';
- dec_params->codec_inst.pltype = -1;
- dec_params->codec_inst.pacsize = 0;
- dec_params->codec_inst.rate = 0;
- return false;
-}
-
-int16_t ACMGenericCodec::ResetEncoder() {
- WriteLockScoped lockCodec(codec_wrapper_lock_);
- ReadLockScoped lockNetEq(*neteq_decode_lock_);
- return ResetEncoderSafe();
-}
-
-int16_t ACMGenericCodec::ResetEncoderSafe() {
- if (!encoder_exist_ || !encoder_initialized_) {
- // We don't reset if encoder doesn't exists or isn't initialized yet.
- return 0;
- }
-
- in_audio_ix_write_ = 0;
- in_audio_ix_read_ = 0;
- in_timestamp_ix_write_ = 0;
- num_missed_samples_ = 0;
- is_audio_buff_fresh_ = true;
- memset(in_audio_, 0, AUDIO_BUFFER_SIZE_W16 * sizeof(int16_t));
- memset(in_timestamp_, 0, TIMESTAMP_BUFFER_SIZE_W32 * sizeof(int32_t));
-
- // Store DTX/VAD parameters.
- bool enable_vad = vad_enabled_;
- bool enable_dtx = dtx_enabled_;
- ACMVADMode mode = vad_mode_;
-
- // Reset the encoder.
- if (InternalResetEncoder() < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "ResetEncoderSafe: error in reset encoder");
- return -1;
- }
-
- // Disable DTX & VAD to delete the states and have a fresh start.
- DisableDTX();
- DisableVAD();
-
- // Set DTX/VAD.
- int status = SetVADSafe(&enable_dtx, &enable_vad, &mode);
- dtx_enabled_ = enable_dtx;
- vad_enabled_ = enable_vad;
- vad_mode_ = mode;
- return status;
-}
-
-int16_t ACMGenericCodec::InternalResetEncoder() {
- // Call the codecs internal encoder initialization/reset function.
- return InternalInitEncoder(&encoder_params_);
-}
-
-int16_t ACMGenericCodec::InitEncoder(WebRtcACMCodecParams* codec_params,
- bool force_initialization) {
- WriteLockScoped lockCodec(codec_wrapper_lock_);
- ReadLockScoped lockNetEq(*neteq_decode_lock_);
- return InitEncoderSafe(codec_params, force_initialization);
-}
-
-int16_t ACMGenericCodec::InitEncoderSafe(WebRtcACMCodecParams* codec_params,
- bool force_initialization) {
- // Check if we got a valid set of parameters.
- int mirrorID;
- int codec_number = ACMCodecDB::CodecNumber(&(codec_params->codec_inst),
- &mirrorID);
- if (codec_number < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitEncoderSafe: error, codec number negative");
- return -1;
- }
- // Check if the parameters are for this codec.
- if ((codec_id_ >= 0) && (codec_id_ != codec_number) &&
- (codec_id_ != mirrorID)) {
- // The current codec is not the same as the one given by codec_params.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitEncoderSafe: current codec is not the same as the one "
- "given by codec_params");
- return -1;
- }
-
- if (!CanChangeEncodingParam(codec_params->codec_inst)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitEncoderSafe: cannot change encoding parameters");
- return -1;
- }
-
- if (encoder_initialized_ && !force_initialization) {
- // The encoder is already initialized, and we don't want to force
- // initialization.
- return 0;
- }
- int16_t status;
- if (!encoder_exist_) {
- // New encoder, start with creating.
- encoder_initialized_ = false;
- status = CreateEncoder();
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitEncoderSafe: cannot create encoder");
- return -1;
- } else {
- encoder_exist_ = true;
- }
- }
- frame_len_smpl_ = (codec_params->codec_inst).pacsize;
- num_channels_ = codec_params->codec_inst.channels;
- status = InternalInitEncoder(codec_params);
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitEncoderSafe: error in init encoder");
- encoder_initialized_ = false;
- return -1;
- } else {
- // Store encoder parameters.
- memcpy(&encoder_params_, codec_params, sizeof(WebRtcACMCodecParams));
- encoder_initialized_ = true;
- if (in_audio_ == NULL) {
- in_audio_ = new int16_t[AUDIO_BUFFER_SIZE_W16];
- if (in_audio_ == NULL) {
- return -1;
- }
- }
- if (in_timestamp_ == NULL) {
- in_timestamp_ = new uint32_t[TIMESTAMP_BUFFER_SIZE_W32];
- if (in_timestamp_ == NULL) {
- return -1;
- }
- }
- // Fresh start for audio buffer.
- is_audio_buff_fresh_ = true;
- memset(in_audio_, 0, AUDIO_BUFFER_SIZE_W16 * sizeof(int16_t));
- memset(in_timestamp_, 0, sizeof(uint32_t) * TIMESTAMP_BUFFER_SIZE_W32);
- in_audio_ix_write_ = 0;
- in_audio_ix_read_ = 0;
- in_timestamp_ix_write_ = 0;
- }
- status = SetVADSafe(&codec_params->enable_dtx, &codec_params->enable_vad,
- &codec_params->vad_mode);
- return status;
-}
-
-// TODO(tlegrand): Remove the function CanChangeEncodingParam. Returns true
-// for all codecs.
-bool ACMGenericCodec::CanChangeEncodingParam(CodecInst& /*codec_inst*/) {
- return true;
-}
-
-void ACMGenericCodec::CurrentRate(int32_t& /* rate_bps */) {
- return;
-}
-
-int16_t ACMGenericCodec::InitDecoder(WebRtcACMCodecParams* codec_params,
- bool force_initialization) {
- WriteLockScoped lockCodc(codec_wrapper_lock_);
- WriteLockScoped lockNetEq(*neteq_decode_lock_);
- return InitDecoderSafe(codec_params, force_initialization);
-}
-
-int16_t ACMGenericCodec::InitDecoderSafe(WebRtcACMCodecParams* codec_params,
- bool force_initialization) {
- int mirror_id;
- // Check if we got a valid set of parameters.
- int codec_number = ACMCodecDB::ReceiverCodecNumber(&codec_params->codec_inst,
- &mirror_id);
- if (codec_number < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitDecoderSafe: error, invalid codec number");
- return -1;
- }
- // Check if the parameters are for this codec.
- if ((codec_id_ >= 0) && (codec_id_ != codec_number) &&
- (codec_id_ != mirror_id)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitDecoderSafe: current codec is not the same as the one "
- "given by codec_params");
- // The current codec is not the same as the one given by codec_params.
- return -1;
- }
-
- if (decoder_initialized_ && !force_initialization) {
- // The decoder is already initialized, and we don't want to force
- // initialization.
- return 0;
- }
-
- int16_t status;
- if (!decoder_exist_) {
- // New decoder, start with creating.
- decoder_initialized_ = false;
- status = CreateDecoder();
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitDecoderSafe: cannot create decoder");
- return -1;
- } else {
- decoder_exist_ = true;
- }
- }
-
- status = InternalInitDecoder(codec_params);
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InitDecoderSafe: cannot init decoder");
- decoder_initialized_ = false;
- return -1;
- } else {
- // Store decoder parameters.
- SaveDecoderParamSafe(codec_params);
- decoder_initialized_ = true;
- }
- return 0;
-}
-
-int16_t ACMGenericCodec::ResetDecoder(int16_t payload_type) {
- WriteLockScoped lockCodec(codec_wrapper_lock_);
- WriteLockScoped lockNetEq(*neteq_decode_lock_);
- return ResetDecoderSafe(payload_type);
-}
-
-int16_t ACMGenericCodec::ResetDecoderSafe(int16_t payload_type) {
- WebRtcACMCodecParams decoder_params;
- if (!decoder_exist_ || !decoder_initialized_) {
- return 0;
- }
- // Initialization of the decoder should work for all the codec. For codecs
- // that needs to keep some states an overloading implementation of
- // |DecoderParamsSafe| exists.
- DecoderParamsSafe(&decoder_params, static_cast<uint8_t>(payload_type));
- return InternalInitDecoder(&decoder_params);
-}
-
-void ACMGenericCodec::ResetNoMissedSamples() {
- WriteLockScoped cs(codec_wrapper_lock_);
- num_missed_samples_ = 0;
-}
-
-void ACMGenericCodec::IncreaseNoMissedSamples(const int16_t num_samples) {
- num_missed_samples_ += num_samples;
-}
-
-// Get the number of missed samples, this can be public.
-uint32_t ACMGenericCodec::NoMissedSamples() const {
- ReadLockScoped cs(codec_wrapper_lock_);
- return num_missed_samples_;
-}
-
-void ACMGenericCodec::DestructEncoder() {
- WriteLockScoped wl(codec_wrapper_lock_);
-
- // Disable VAD and delete the instance.
- if (ptr_vad_inst_ != NULL) {
- WebRtcVad_Free(ptr_vad_inst_);
- ptr_vad_inst_ = NULL;
- }
- vad_enabled_ = false;
- vad_mode_ = VADNormal;
-
- // Disable DTX and delete the instance.
- dtx_enabled_ = false;
- if (ptr_dtx_inst_ != NULL) {
- WebRtcCng_FreeEnc(ptr_dtx_inst_);
- ptr_dtx_inst_ = NULL;
- }
- num_lpc_params_ = kNewCNGNumPLCParams;
-
- DestructEncoderSafe();
-}
-
-void ACMGenericCodec::DestructDecoder() {
- WriteLockScoped wl(codec_wrapper_lock_);
- decoder_params_.codec_inst.pltype = -1;
- DestructDecoderSafe();
-}
-
-int16_t ACMGenericCodec::SetBitRate(const int32_t bitrate_bps) {
- WriteLockScoped wl(codec_wrapper_lock_);
- return SetBitRateSafe(bitrate_bps);
-}
-
-int16_t ACMGenericCodec::SetBitRateSafe(const int32_t bitrate_bps) {
- // If the codec can change the bit-rate this function is overloaded.
- // Otherwise the only acceptable value is the one that is in the database.
- CodecInst codec_params;
- if (ACMCodecDB::Codec(codec_id_, &codec_params) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: error in ACMCodecDB::Codec");
- return -1;
- }
- if (codec_params.rate != bitrate_bps) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: rate value is not acceptable");
- return -1;
- } else {
- return 0;
- }
-}
-
-// iSAC specific functions:
-int32_t ACMGenericCodec::GetEstimatedBandwidth() {
- WriteLockScoped wl(codec_wrapper_lock_);
- return GetEstimatedBandwidthSafe();
-}
-
-int32_t ACMGenericCodec::GetEstimatedBandwidthSafe() {
- // All codecs but iSAC will return -1.
- return -1;
-}
-
-int32_t ACMGenericCodec::SetEstimatedBandwidth(int32_t estimated_bandwidth) {
- WriteLockScoped wl(codec_wrapper_lock_);
- return SetEstimatedBandwidthSafe(estimated_bandwidth);
-}
-
-int32_t ACMGenericCodec::SetEstimatedBandwidthSafe(
- int32_t /*estimated_bandwidth*/) {
- // All codecs but iSAC will return -1.
- return -1;
-}
-// End of iSAC specific functions.
-
-int32_t ACMGenericCodec::GetRedPayload(uint8_t* red_payload,
- int16_t* payload_bytes) {
- WriteLockScoped wl(codec_wrapper_lock_);
- return GetRedPayloadSafe(red_payload, payload_bytes);
-}
-
-int32_t ACMGenericCodec::GetRedPayloadSafe(uint8_t* /* red_payload */,
- int16_t* /* payload_bytes */) {
- return -1; // Do nothing by default.
-}
-
-int16_t ACMGenericCodec::CreateEncoder() {
- int16_t status = 0;
- if (!encoder_exist_) {
- status = InternalCreateEncoder();
- // We just created the codec and obviously it is not initialized.
- encoder_initialized_ = false;
- }
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CreateEncoder: error in internal create encoder");
- encoder_exist_ = false;
- } else {
- encoder_exist_ = true;
- }
- return status;
-}
-
-int16_t ACMGenericCodec::CreateDecoder() {
- int16_t status = 0;
- if (!decoder_exist_) {
- status = InternalCreateDecoder();
- // Decoder just created and obviously it is not initialized.
- decoder_initialized_ = false;
- }
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CreateDecoder: error in internal create decoder");
- decoder_exist_ = false;
- } else {
- decoder_exist_ = true;
- }
- return status;
-}
-
-void ACMGenericCodec::DestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WriteLockScoped lockCodec(codec_wrapper_lock_);
- ReadLockScoped lockNetEq(*neteq_decode_lock_);
- InternalDestructEncoderInst(ptr_inst);
- }
-}
-
-// Get the current audio buffer including read and write states, and timestamps.
-int16_t ACMGenericCodec::AudioBuffer(WebRtcACMAudioBuff& audio_buff) {
- ReadLockScoped cs(codec_wrapper_lock_);
- memcpy(audio_buff.in_audio, in_audio_,
- AUDIO_BUFFER_SIZE_W16 * sizeof(int16_t));
- audio_buff.in_audio_ix_read = in_audio_ix_read_;
- audio_buff.in_audio_ix_write = in_audio_ix_write_;
- memcpy(audio_buff.in_timestamp, in_timestamp_,
- TIMESTAMP_BUFFER_SIZE_W32 * sizeof(uint32_t));
- audio_buff.in_timestamp_ix_write = in_timestamp_ix_write_;
- audio_buff.last_timestamp = last_timestamp_;
- return 0;
-}
-
-// Set the audio buffer.
-int16_t ACMGenericCodec::SetAudioBuffer(WebRtcACMAudioBuff& audio_buff) {
- WriteLockScoped cs(codec_wrapper_lock_);
- memcpy(in_audio_, audio_buff.in_audio,
- AUDIO_BUFFER_SIZE_W16 * sizeof(int16_t));
- in_audio_ix_read_ = audio_buff.in_audio_ix_read;
- in_audio_ix_write_ = audio_buff.in_audio_ix_write;
- memcpy(in_timestamp_, audio_buff.in_timestamp,
- TIMESTAMP_BUFFER_SIZE_W32 * sizeof(uint32_t));
- in_timestamp_ix_write_ = audio_buff.in_timestamp_ix_write;
- last_timestamp_ = audio_buff.last_timestamp;
- is_audio_buff_fresh_ = false;
- return 0;
-}
-
-uint32_t ACMGenericCodec::LastEncodedTimestamp() const {
- ReadLockScoped cs(codec_wrapper_lock_);
- return last_encoded_timestamp_;
-}
-
-uint32_t ACMGenericCodec::EarliestTimestamp() const {
- ReadLockScoped cs(codec_wrapper_lock_);
- return in_timestamp_[0];
-}
-
-int16_t ACMGenericCodec::SetVAD(bool* enable_dtx, bool* enable_vad,
- ACMVADMode* mode) {
- WriteLockScoped cs(codec_wrapper_lock_);
- return SetVADSafe(enable_dtx, enable_vad, mode);
-}
-
-int16_t ACMGenericCodec::SetVADSafe(bool* enable_dtx, bool* enable_vad,
- ACMVADMode* mode) {
- if (!STR_CASE_CMP(encoder_params_.codec_inst.plname, "OPUS") ||
- encoder_params_.codec_inst.channels == 2 ) {
- // VAD/DTX is not supported for Opus (even if sending mono), or other
- // stereo codecs.
- DisableDTX();
- DisableVAD();
- *enable_dtx = false;
- *enable_vad = false;
- return 0;
- }
-
- if (*enable_dtx) {
- // Make G729 AnnexB a special case.
- if (!STR_CASE_CMP(encoder_params_.codec_inst.plname, "G729")
- && !has_internal_dtx_) {
- if (ACMGenericCodec::EnableDTX() < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetVADSafe: error in enable DTX");
- *enable_dtx = false;
- *enable_vad = vad_enabled_;
- return -1;
- }
- } else {
- if (EnableDTX() < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetVADSafe: error in enable DTX");
- *enable_dtx = false;
- *enable_vad = vad_enabled_;
- return -1;
- }
- }
-
- // If codec does not have internal DTX (normal case) enabling DTX requires
- // an active VAD. '*enable_dtx == true' overwrites VAD status.
- // If codec has internal DTX, practically we don't need WebRtc VAD, however,
- // we let the user to turn it on if they need call-backs on silence.
- if (!has_internal_dtx_) {
- // DTX is enabled, and VAD will be activated.
- *enable_vad = true;
- }
- } else {
- // Make G729 AnnexB a special case.
- if (!STR_CASE_CMP(encoder_params_.codec_inst.plname, "G729")
- && !has_internal_dtx_) {
- ACMGenericCodec::DisableDTX();
- *enable_dtx = false;
- } else {
- DisableDTX();
- *enable_dtx = false;
- }
- }
-
- int16_t status = (*enable_vad) ? EnableVAD(*mode) : DisableVAD();
- if (status < 0) {
- // Failed to set VAD, disable DTX.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetVADSafe: error in enable VAD");
- DisableDTX();
- *enable_dtx = false;
- *enable_vad = false;
- }
- return status;
-}
-
-int16_t ACMGenericCodec::EnableDTX() {
- if (has_internal_dtx_) {
- // We should not be here if we have internal DTX this function should be
- // overloaded by the derived class in this case.
- return -1;
- }
- if (!dtx_enabled_) {
- if (WebRtcCng_CreateEnc(&ptr_dtx_inst_) < 0) {
- ptr_dtx_inst_ = NULL;
- return -1;
- }
- uint16_t freq_hz;
- EncoderSampFreq(freq_hz);
- if (WebRtcCng_InitEnc(ptr_dtx_inst_, freq_hz, kCngSidIntervalMsec,
- num_lpc_params_) < 0) {
- // Couldn't initialize, has to return -1, and free the memory.
- WebRtcCng_FreeEnc(ptr_dtx_inst_);
- ptr_dtx_inst_ = NULL;
- return -1;
- }
- dtx_enabled_ = true;
- }
- return 0;
-}
-
-int16_t ACMGenericCodec::DisableDTX() {
- if (has_internal_dtx_) {
- // We should not be here if we have internal DTX this function should be
- // overloaded by the derived class in this case.
- return -1;
- }
- if (ptr_dtx_inst_ != NULL) {
- WebRtcCng_FreeEnc(ptr_dtx_inst_);
- ptr_dtx_inst_ = NULL;
- }
- dtx_enabled_ = false;
- return 0;
-}
-
-int16_t ACMGenericCodec::EnableVAD(ACMVADMode mode) {
- if ((mode < VADNormal) || (mode > VADVeryAggr)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EnableVAD: error in VAD mode range");
- return -1;
- }
-
- if (!vad_enabled_) {
- if (WebRtcVad_Create(&ptr_vad_inst_) < 0) {
- ptr_vad_inst_ = NULL;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EnableVAD: error in create VAD");
- return -1;
- }
- if (WebRtcVad_Init(ptr_vad_inst_) < 0) {
- WebRtcVad_Free(ptr_vad_inst_);
- ptr_vad_inst_ = NULL;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EnableVAD: error in init VAD");
- return -1;
- }
- }
-
- // Set the VAD mode to the given value.
- if (WebRtcVad_set_mode(ptr_vad_inst_, mode) < 0) {
- // We failed to set the mode and we have to return -1. If we already have a
- // working VAD (vad_enabled_ == true) then we leave it to work. Otherwise,
- // the following will be executed.
- if (!vad_enabled_) {
- // We just created the instance but cannot set the mode we have to free
- // the memory.
- WebRtcVad_Free(ptr_vad_inst_);
- ptr_vad_inst_ = NULL;
- }
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, unique_id_,
- "EnableVAD: failed to set the VAD mode");
- return -1;
- }
- vad_mode_ = mode;
- vad_enabled_ = true;
- return 0;
-}
-
-int16_t ACMGenericCodec::DisableVAD() {
- if (ptr_vad_inst_ != NULL) {
- WebRtcVad_Free(ptr_vad_inst_);
- ptr_vad_inst_ = NULL;
- }
- vad_enabled_ = false;
- return 0;
-}
-
-int32_t ACMGenericCodec::ReplaceInternalDTX(const bool replace_internal_dtx) {
- WriteLockScoped cs(codec_wrapper_lock_);
- return ReplaceInternalDTXSafe(replace_internal_dtx);
-}
-
-int32_t ACMGenericCodec::ReplaceInternalDTXSafe(
- const bool /* replace_internal_dtx */) {
- return -1;
-}
-
-int32_t ACMGenericCodec::IsInternalDTXReplaced(bool* internal_dtx_replaced) {
- WriteLockScoped cs(codec_wrapper_lock_);
- return IsInternalDTXReplacedSafe(internal_dtx_replaced);
-}
-
-int32_t ACMGenericCodec::IsInternalDTXReplacedSafe(
- bool* internal_dtx_replaced) {
- *internal_dtx_replaced = false;
- return 0;
-}
-
-int16_t ACMGenericCodec::ProcessFrameVADDTX(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- int16_t* samples_processed) {
- if (!vad_enabled_) {
- // VAD not enabled, set all |vad_lable_[]| to 1 (speech detected).
- for (int n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
- vad_label_[n] = 1;
- }
- *samples_processed = 0;
- return 0;
- }
-
- uint16_t freq_hz;
- EncoderSampFreq(freq_hz);
-
- // Calculate number of samples in 10 ms blocks, and number ms in one frame.
- int16_t samples_in_10ms = static_cast<int16_t>(freq_hz / 100);
- int32_t frame_len_ms = static_cast<int32_t>(frame_len_smpl_) * 1000 / freq_hz;
- int16_t status;
-
- // Vector for storing maximum 30 ms of mono audio at 48 kHz.
- int16_t audio[1440];
-
- // Calculate number of VAD-blocks to process, and number of samples in each
- // block.
- int num_samples_to_process[2];
- if (frame_len_ms == 40) {
- // 20 ms in each VAD block.
- num_samples_to_process[0] = num_samples_to_process[1] = 2 * samples_in_10ms;
- } else {
- // For 10-30 ms framesizes, second VAD block will be size zero ms,
- // for 50 and 60 ms first VAD block will be 30 ms.
- num_samples_to_process[0] =
- (frame_len_ms > 30) ? 3 * samples_in_10ms : frame_len_smpl_;
- num_samples_to_process[1] = frame_len_smpl_ - num_samples_to_process[0];
- }
-
- int offset = 0;
- int loops = (num_samples_to_process[1] > 0) ? 2 : 1;
- for (int i = 0; i < loops; i++) {
- // TODO(turajs): Do we need to care about VAD together with stereo?
- // If stereo, calculate mean of the two channels.
- if (num_channels_ == 2) {
- for (int j = 0; j < num_samples_to_process[i]; j++) {
- audio[j] = (in_audio_[(offset + j) * 2] +
- in_audio_[(offset + j) * 2 + 1]) / 2;
- }
- offset = num_samples_to_process[0];
- } else {
- // Mono, copy data from in_audio_ to continue work on.
- memcpy(audio, in_audio_, sizeof(int16_t) * num_samples_to_process[i]);
- }
-
- // Call VAD.
- status = static_cast<int16_t>(WebRtcVad_Process(ptr_vad_inst_,
- static_cast<int>(freq_hz),
- audio,
- num_samples_to_process[i]));
- vad_label_[i] = status;
-
- if (status < 0) {
- // This will force that the data be removed from the buffer.
- *samples_processed += num_samples_to_process[i];
- return -1;
- }
-
- // If VAD decision non-active, update DTX. NOTE! We only do this if the
- // first part of a frame gets the VAD decision "inactive". Otherwise DTX
- // might say it is time to transmit SID frame, but we will encode the whole
- // frame, because the first part is active.
- *samples_processed = 0;
- if ((status == 0) && (i == 0) && dtx_enabled_ && !has_internal_dtx_) {
- int16_t bitstream_len;
- int num_10ms_frames = num_samples_to_process[i] / samples_in_10ms;
- *bitstream_len_byte = 0;
- for (int n = 0; n < num_10ms_frames; n++) {
- // This block is (passive) && (vad enabled). If first CNG after
- // speech, force SID by setting last parameter to "1".
- status = WebRtcCng_Encode(ptr_dtx_inst_, &audio[n * samples_in_10ms],
- samples_in_10ms, bitstream, &bitstream_len,
- !prev_frame_cng_);
- if (status < 0) {
- return -1;
- }
-
- // Update previous frame was CNG.
- prev_frame_cng_ = 1;
-
- *samples_processed += samples_in_10ms * num_channels_;
-
- // |bitstream_len_byte| will only be > 0 once per 100 ms.
- *bitstream_len_byte += bitstream_len;
- }
-
- // Check if all samples got processed by the DTX.
- if (*samples_processed != num_samples_to_process[i] * num_channels_) {
- // Set to zero since something went wrong. Shouldn't happen.
- *samples_processed = 0;
- }
- } else {
- // Update previous frame was not CNG.
- prev_frame_cng_ = 0;
- }
-
- if (*samples_processed > 0) {
- // The block contains inactive speech, and is processed by DTX.
- // Discontinue running VAD.
- break;
- }
- }
-
- return status;
-}
-
-int16_t ACMGenericCodec::SamplesLeftToEncode() {
- ReadLockScoped rl(codec_wrapper_lock_);
- return (frame_len_smpl_ <= in_audio_ix_write_) ? 0 :
- (frame_len_smpl_ - in_audio_ix_write_);
-}
-
-void ACMGenericCodec::SetUniqueID(const uint32_t id) {
- unique_id_ = id;
-}
-
-bool ACMGenericCodec::IsAudioBufferFresh() const {
- ReadLockScoped rl(codec_wrapper_lock_);
- return is_audio_buff_fresh_;
-}
-
-int16_t ACMGenericCodec::UpdateDecoderSampFreq(int16_t /* codec_id */) {
- return 0;
-}
-
-// This function is replaced by codec specific functions for some codecs.
-int16_t ACMGenericCodec::EncoderSampFreq(uint16_t& samp_freq_hz) {
- int32_t f;
- f = ACMCodecDB::CodecFreq(codec_id_);
- if (f < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EncoderSampFreq: codec frequency is negative");
- return -1;
- } else {
- samp_freq_hz = static_cast<uint16_t>(f);
- return 0;
- }
-}
-
-int32_t ACMGenericCodec::ConfigISACBandwidthEstimator(
- const uint8_t /* init_frame_size_msec */,
- const uint16_t /* init_rate_bit_per_sec */,
- const bool /* enforce_frame_size */) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, unique_id_,
- "The send-codec is not iSAC, failed to config iSAC bandwidth "
- "estimator.");
- return -1;
-}
-
-int32_t ACMGenericCodec::SetISACMaxRate(
- const uint32_t /* max_rate_bit_per_sec */) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, unique_id_,
- "The send-codec is not iSAC, failed to set iSAC max rate.");
- return -1;
-}
-
-int32_t ACMGenericCodec::SetISACMaxPayloadSize(
- const uint16_t /* max_payload_len_bytes */) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, unique_id_,
- "The send-codec is not iSAC, failed to set iSAC max "
- "payload-size.");
- return -1;
-}
-
-void ACMGenericCodec::SaveDecoderParam(
- const WebRtcACMCodecParams* codec_params) {
- WriteLockScoped wl(codec_wrapper_lock_);
- SaveDecoderParamSafe(codec_params);
-}
-
-void ACMGenericCodec::SaveDecoderParamSafe(
- const WebRtcACMCodecParams* codec_params) {
- memcpy(&decoder_params_, codec_params, sizeof(WebRtcACMCodecParams));
-}
-
-int16_t ACMGenericCodec::UpdateEncoderSampFreq(
- uint16_t /* samp_freq_hz */) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "It is asked for a change in smapling frequency while the "
- "current send-codec supports only one sampling rate.");
- return -1;
-}
-
-void ACMGenericCodec::SetIsMaster(bool is_master) {
- WriteLockScoped wl(codec_wrapper_lock_);
- is_master_ = is_master;
-}
-
-int16_t ACMGenericCodec::REDPayloadISAC(const int32_t /* isac_rate */,
- const int16_t /* isac_bw_estimate */,
- uint8_t* /* payload */,
- int16_t* /* payload_len_bytes */) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error: REDPayloadISAC is an iSAC specific function");
- return -1;
-}
-
-bool ACMGenericCodec::IsTrueStereoCodec() { return false; }
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.h
deleted file mode 100644
index c1f9cdc554a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_generic_codec.h
+++ /dev/null
@@ -1,1224 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
-
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#define MAX_FRAME_SIZE_10MSEC 6
-
-// forward declaration
-struct WebRtcVadInst;
-struct WebRtcCngEncInst;
-
-namespace webrtc {
-
-// forward declaration
-struct CodecInst;
-struct WebRtcACMCodecParams;
-
-namespace acm1 {
-
-class ACMNetEQ;
-
-class ACMGenericCodec {
- public:
- ///////////////////////////////////////////////////////////////////////////
- // Constructor of the class
- //
- ACMGenericCodec();
-
- ///////////////////////////////////////////////////////////////////////////
- // Destructor of the class.
- //
- virtual ~ACMGenericCodec();
-
- ///////////////////////////////////////////////////////////////////////////
- // ACMGenericCodec* CreateInstance();
- // The function will be used for FEC. It is not implemented yet.
- //
- virtual ACMGenericCodec* CreateInstance() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t Encode()
- // The function is called to perform an encoding of the audio stored in
- // audio buffer. An encoding is performed only if enough audio, i.e. equal
- // to the frame-size of the codec, exist. The audio frame will be processed
- // by VAD and CN/DTX if required. There are few different cases.
- //
- // A) Neither VAD nor DTX is active; the frame is encoded by the encoder.
- //
- // B) VAD is enabled but not DTX; in this case the audio is processed by VAD
- // and encoded by the encoder. The "*encoding_type" will be either
- // "kActiveNormalEncode" or "kPassiveNormalEncode" if frame is active or
- // passive, respectively.
- //
- // C) DTX is enabled; if the codec has internal VAD/DTX we just encode the
- // frame by the encoder. Otherwise, the frame is passed through VAD and
- // if identified as passive, then it will be processed by CN/DTX. If the
- // frame is active it will be encoded by the encoder.
- //
- // This function acquires the appropriate locks and calls EncodeSafe() for
- // the actual processing.
- //
- // Outputs:
- // -bitstream : a buffer where bit-stream will be written to.
- // -bitstream_len_byte : contains the length of the bit-stream in
- // bytes.
- // -timestamp : contains the RTP timestamp, this is the
- // sampling time of the first sample encoded
- // (measured in number of samples).
- // -encoding_type : contains the type of encoding applied on the
- // audio samples. The alternatives are
- // (c.f. acm_common_types.h)
- // -kNoEncoding:
- // there was not enough data to encode. or
- // some error has happened that we could
- // not do encoding.
- // -kActiveNormalEncoded:
- // the audio frame is active and encoded by
- // the given codec.
- // -kPassiveNormalEncoded:
- // the audio frame is passive but coded with
- // the given codec (NO DTX).
- // -kPassiveDTXWB:
- // The audio frame is passive and used
- // wide-band CN to encode.
- // -kPassiveDTXNB:
- // The audio frame is passive and used
- // narrow-band CN to encode.
- //
- // Return value:
- // -1 if error is occurred, otherwise the length of the bit-stream in
- // bytes.
- //
- int16_t Encode(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- uint32_t* timestamp,
- WebRtcACMEncodingType* encoding_type);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t Decode()
- // This function is used to decode a given bit-stream, without engaging
- // NetEQ.
- //
- // This function acquires the appropriate locks and calls DecodeSafe() for
- // the actual processing. Please note that this is not functional yet.
- //
- // Inputs:
- // -bitstream : a buffer where bit-stream will be read.
- // -bitstream_len_byte : the length of the bit-stream in bytes.
- //
- // Outputs:
- // -audio : pointer to a buffer where the audio will written.
- // -audio_samples : number of audio samples out of decoding the given
- // bit-stream.
- // -speech_type : speech type (for future use).
- //
- // Return value:
- // -1 if failed to decode,
- // 0 if succeeded.
- //
- int16_t Decode(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- ///////////////////////////////////////////////////////////////////////////
- // void SplitStereoPacket()
- // This function is used to split stereo payloads in left and right channel.
- // Codecs which has stereo support has there own implementation of the
- // function.
- //
- // Input/Output:
- // -payload : a vector with the received payload data.
- // The function will reorder the data so that
- // first half holds the left channel data, and the
- // second half the right channel data.
- // -payload_length : length of payload in bytes. Will be changed to
- // twice the input in case of true stereo, where
- // we simply copy the data and return it both for
- // left channel and right channel decoding.
- //
- virtual void SplitStereoPacket(uint8_t* /* payload */,
- int32_t* /* payload_length */) {}
-
- ///////////////////////////////////////////////////////////////////////////
- // bool EncoderInitialized();
- //
- // Return value:
- // True if the encoder is successfully initialized,
- // false otherwise.
- //
- bool EncoderInitialized();
-
- ///////////////////////////////////////////////////////////////////////////
- // bool DecoderInitialized();
- //
- // Return value:
- // True if the decoder is successfully initialized,
- // false otherwise.
- //
- bool DecoderInitialized();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t EncoderParams()
- // It is called to get encoder parameters. It will call
- // EncoderParamsSafe() in turn.
- //
- // Output:
- // -enc_params : a buffer where the encoder parameters is
- // written to. If the encoder is not
- // initialized this buffer is filled with
- // invalid values
- // Return value:
- // -1 if the encoder is not initialized,
- // 0 otherwise.
- //
- int16_t EncoderParams(WebRtcACMCodecParams *enc_params);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t DecoderParams(...)
- // It is called to get decoder parameters. It will call DecoderParamsSafe()
- // in turn.
- //
- // Output:
- // -dec_params : a buffer where the decoder parameters is
- // written to. If the decoder is not initialized
- // this buffer is filled with invalid values
- //
- // Return value:
- // -1 if the decoder is not initialized,
- // 0 otherwise.
- //
- //
- bool DecoderParams(WebRtcACMCodecParams *dec_params,
- const uint8_t payload_type);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InitEncoder(...)
- // This function is called to initialize the encoder with the given
- // parameters.
- //
- // Input:
- // -codec_params : parameters of encoder.
- // -force_initialization: if false the initialization is invoked only if
- // the encoder is not initialized. If true the
- // encoder is forced to (re)initialize.
- //
- // Return value:
- // 0 if could initialize successfully,
- // -1 if failed to initialize.
- //
- //
- int16_t InitEncoder(WebRtcACMCodecParams* codec_params,
- bool force_initialization);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InitDecoder()
- // This function is called to initialize the decoder with the given
- // parameters. (c.f. acm_common_defs.h & common_types.h for the
- // definition of the structure)
- //
- // Input:
- // -codec_params : parameters of decoder.
- // -force_initialization: if false the initialization is invoked only
- // if the decoder is not initialized. If true
- // the encoder is forced to(re)initialize.
- //
- // Return value:
- // 0 if could initialize successfully,
- // -1 if failed to initialize.
- //
- //
- int16_t InitDecoder(WebRtcACMCodecParams* codec_params,
- bool force_initialization);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t RegisterInNetEq(...)
- // This function is called to register the decoder in NetEq, with the given
- // payload type.
- //
- // Inputs:
- // -neteq : pointer to NetEq Instance
- // -codec_inst : instance with of the codec settings of the codec
- //
- // Return values
- // -1 if failed to register,
- // 0 if successfully initialized.
- //
- int32_t RegisterInNetEq(ACMNetEQ* neteq, const CodecInst& codec_inst);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t Add10MsData(...)
- // This function is called to add 10 ms of audio to the audio buffer of
- // the codec.
- //
- // Inputs:
- // -timestamp : the timestamp of the 10 ms audio. the timestamp
- // is the sampling time of the
- // first sample measured in number of samples.
- // -data : a buffer that contains the audio. The codec
- // expects to get the audio in correct sampling
- // frequency
- // -length : the length of the audio buffer
- // -audio_channel : 0 for mono, 1 for stereo (not supported yet)
- //
- // Return values:
- // -1 if failed
- // 0 otherwise.
- //
- int32_t Add10MsData(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length,
- const uint8_t audio_channel);
-
- ///////////////////////////////////////////////////////////////////////////
- // uint32_t NoMissedSamples()
- // This function returns the number of samples which are overwritten in
- // the audio buffer. The audio samples are overwritten if the input audio
- // buffer is full, but Add10MsData() is called. (We might remove this
- // function if it is not used)
- //
- // Return Value:
- // Number of samples which are overwritten.
- //
- uint32_t NoMissedSamples() const;
-
- ///////////////////////////////////////////////////////////////////////////
- // void ResetNoMissedSamples()
- // This function resets the number of overwritten samples to zero.
- // (We might remove this function if we remove NoMissedSamples())
- //
- void ResetNoMissedSamples();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t SetBitRate()
- // The function is called to set the encoding rate.
- //
- // Input:
- // -bitrate_bps : encoding rate in bits per second
- //
- // Return value:
- // -1 if failed to set the rate, due to invalid input or given
- // codec is not rate-adjustable.
- // 0 if the rate is adjusted successfully
- //
- int16_t SetBitRate(const int32_t bitrate_bps);
-
- ///////////////////////////////////////////////////////////////////////////
- // DestructEncoderInst()
- // This API is used in conferencing. It will free the memory that is pointed
- // by |ptr_inst|. |ptr_inst| is a pointer to encoder instance, created and
- // filled up by calling EncoderInst(...).
- //
- // Inputs:
- // -ptr_inst : pointer to an encoder instance to be deleted.
- //
- //
- void DestructEncoderInst(void* ptr_inst);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t AudioBuffer()
- // This is used when synchronization of codecs is required. There are cases
- // that the audio buffers of two codecs have to be synched. By calling this
- // function on can get the audio buffer and other related parameters, such
- // as timestamps...
- //
- // Output:
- // -audio_buff : a pointer to WebRtcACMAudioBuff where the audio
- // buffer of this codec will be written to.
- //
- // Return value:
- // -1 if fails to copy the audio buffer,
- // 0 if succeeded.
- //
- int16_t AudioBuffer(WebRtcACMAudioBuff& audio_buff);
-
- ///////////////////////////////////////////////////////////////////////////
- // uint32_t EarliestTimestamp()
- // Returns the timestamp of the first 10 ms in audio buffer. This is used
- // to identify if a synchronization of two encoders is required.
- //
- // Return value:
- // timestamp of the first 10 ms audio in the audio buffer.
- //
- uint32_t EarliestTimestamp() const;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t SetAudioBuffer()
- // This function is called to set the audio buffer and the associated
- // parameters to a given value.
- //
- // Return value:
- // -1 if fails to copy the audio buffer,
- // 0 if succeeded.
- //
- int16_t SetAudioBuffer(WebRtcACMAudioBuff& audio_buff);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t SetVAD()
- // This is called to set VAD & DTX. If the codec has internal DTX, it will
- // be used. If DTX is enabled and the codec does not have internal DTX,
- // WebRtc-VAD will be used to decide if the frame is active. If DTX is
- // disabled but VAD is enabled the audio is passed through VAD to label it
- // as active or passive, but the frame is encoded normally. However the
- // bit-stream is labeled properly so that ACM::Process() can use this
- // information. In case of failure, the previous states of the VAD & DTX
- // are kept.
- //
- // Input/Output:
- // -enable_dtx : if true DTX will be enabled otherwise the DTX is
- // disabled. If codec has internal DTX that will be
- // used, otherwise WebRtc-CNG is used. In the latter
- // case VAD is automatically activated.
- // -enable_vad : if true WebRtc-VAD is enabled, otherwise VAD is
- // disabled, except for the case that DTX is enabled
- // but codec doesn't have internal DTX. In this case
- // VAD is enabled regardless of the value of
- // |enable_vad|.
- // -mode : this specifies the aggressiveness of VAD.
- //
- // Return value
- // -1 if failed to set DTX & VAD as specified,
- // 0 if succeeded.
- //
- int16_t SetVAD(bool* enable_dtx,
- bool* enable_vad,
- ACMVADMode* mode);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t ReplaceInternalDTX()
- // This is called to replace the codec internal DTX with WebRtc DTX.
- // This is only valid for G729 where the user has possibility to replace
- // AnnexB with WebRtc DTX. For other codecs this function has no effect.
- //
- // Input:
- // -replace_internal_dtx : if true the internal DTX is replaced with WebRtc.
- //
- // Return value
- // -1 if failed to replace internal DTX,
- // 0 if succeeded.
- //
- int32_t ReplaceInternalDTX(const bool replace_internal_dtx);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t IsInternalDTXReplaced()
- // This is called to check if the codec internal DTX is replaced by WebRtc
- // DTX. This is only valid for G729 where the user has possibility to replace
- // AnnexB with WebRtc DTX. For other codecs this function has no effect.
- //
- // Output:
- // -internal_dtx_replaced: if true the internal DTX is replaced with WebRtc.
- //
- // Return value
- // -1 if failed to check
- // 0 if succeeded.
- //
- int32_t IsInternalDTXReplaced(bool* internal_dtx_replaced);
-
- ///////////////////////////////////////////////////////////////////////////
- // void SetNetEqDecodeLock()
- // Passes the NetEq lock to the codec.
- //
- // Input:
- // -neteq_decode_lock : pointer to the lock associated with NetEQ of ACM.
- //
- void SetNetEqDecodeLock(RWLockWrapper* neteq_decode_lock) {
- neteq_decode_lock_ = neteq_decode_lock;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // bool HasInternalDTX()
- // Used to check if the codec has internal DTX.
- //
- // Return value:
- // true if the codec has an internal DTX, e.g. G729,
- // false otherwise.
- //
- bool HasInternalDTX() const {
- return has_internal_dtx_;
- }
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t GetEstimatedBandwidth()
- // Used to get decoder estimated bandwidth. Only iSAC will provide a value.
- //
- //
- // Return value:
- // -1 if fails to get decoder estimated bandwidth,
- // >0 estimated bandwidth in bits/sec.
- //
- int32_t GetEstimatedBandwidth();
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t SetEstimatedBandwidth()
- // Used to set estiamted bandwidth sent out of band from other side. Only
- // iSAC will have use for the value.
- //
- // Input:
- // -estimated_bandwidth: estimated bandwidth in bits/sec
- //
- // Return value:
- // -1 if fails to set estimated bandwidth,
- // 0 on success.
- //
- int32_t SetEstimatedBandwidth(int32_t estimated_bandwidth);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t GetRedPayload()
- // Used to get codec specific RED payload (if such is implemented).
- // Currently only done in iSAC.
- //
- // Outputs:
- // -red_payload : a pointer to the data for RED payload.
- // -payload_bytes : number of bytes in RED payload.
- //
- // Return value:
- // -1 if fails to get codec specific RED,
- // 0 if succeeded.
- //
- int32_t GetRedPayload(uint8_t* red_payload,
- int16_t* payload_bytes);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t ResetEncoder()
- // By calling this function you would re-initialize the encoder with the
- // current parameters. All the settings, e.g. VAD/DTX, frame-size... should
- // remain unchanged. (In case of iSAC we don't want to lose BWE history.)
- //
- // Return value
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t ResetEncoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t ResetEncoder()
- // By calling this function you would re-initialize the decoder with the
- // current parameters.
- //
- // Return value
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t ResetDecoder(int16_t payload_type);
-
- ///////////////////////////////////////////////////////////////////////////
- // void DestructEncoder()
- // This function is called to delete the encoder instance, if possible, to
- // have a fresh start. For codecs where encoder and decoder share the same
- // instance we cannot delete the encoder and instead we will initialize the
- // encoder. We also delete VAD and DTX if they have been created.
- //
- void DestructEncoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // void DestructDecoder()
- // This function is called to delete the decoder instance, if possible, to
- // have a fresh start. For codecs where encoder and decoder share the same
- // instance we cannot delete the encoder and instead we will initialize the
- // decoder. Before deleting decoder instance it has to be removed from the
- // NetEq list.
- //
- void DestructDecoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t SamplesLeftToEncode()
- // Returns the number of samples required to be able to do encoding.
- //
- // Return value:
- // Number of samples.
- //
- int16_t SamplesLeftToEncode();
-
- ///////////////////////////////////////////////////////////////////////////
- // uint32_t LastEncodedTimestamp()
- // Returns the timestamp of the last frame it encoded.
- //
- // Return value:
- // Timestamp.
- //
- uint32_t LastEncodedTimestamp() const;
-
- ///////////////////////////////////////////////////////////////////////////
- // SetUniqueID()
- // Set a unique ID for the codec to be used for tracing and debugging
- //
- // Input
- // -id : A number to identify the codec.
- //
- void SetUniqueID(const uint32_t id);
-
- ///////////////////////////////////////////////////////////////////////////
- // IsAudioBufferFresh()
- // Specifies if ever audio is injected to this codec.
- //
- // Return value
- // -true; no audio is feed into this codec
- // -false; audio has already been fed to the codec.
- //
- bool IsAudioBufferFresh() const;
-
- ///////////////////////////////////////////////////////////////////////////
- // UpdateDecoderSampFreq()
- // For most of the codecs this function does nothing. It must be
- // implemented for those codecs that one codec instance serves as the
- // decoder for different flavors of the codec. One example is iSAC. there,
- // iSAC 16 kHz and iSAC 32 kHz are treated as two different codecs with
- // different payload types, however, there is only one iSAC instance to
- // decode. The reason for that is we would like to decode and encode with
- // the same codec instance for bandwidth estimator to work.
- //
- // Each time that we receive a new payload type, we call this function to
- // prepare the decoder associated with the new payload. Normally, decoders
- // doesn't have to do anything. For iSAC the decoder has to change it's
- // sampling rate. The input parameter specifies the current flavor of the
- // codec in codec database. For instance, if we just got a SWB payload then
- // the input parameter is ACMCodecDB::isacswb.
- //
- // Input:
- // -codec_id : the ID of the codec associated with the
- // payload type that we just received.
- //
- // Return value:
- // 0 if succeeded in updating the decoder.
- // -1 if failed to update.
- //
- virtual int16_t UpdateDecoderSampFreq(int16_t /* codec_id */);
-
- ///////////////////////////////////////////////////////////////////////////
- // UpdateEncoderSampFreq()
- // Call this function to update the encoder sampling frequency. This
- // is for codecs where one payload-name supports several encoder sampling
- // frequencies. Otherwise, to change the sampling frequency we need to
- // register new codec. ACM will consider that as registration of a new
- // codec, not a change in parameter. For iSAC, switching from WB to SWB
- // is treated as a change in parameter. Therefore, we need this function.
- //
- // Input:
- // -samp_freq_hz : encoder sampling frequency.
- //
- // Return value:
- // -1 if failed, or if this is meaningless for the given codec.
- // 0 if succeeded.
- //
- virtual int16_t UpdateEncoderSampFreq(
- uint16_t samp_freq_hz);
-
- ///////////////////////////////////////////////////////////////////////////
- // EncoderSampFreq()
- // Get the sampling frequency that the encoder (WebRtc wrapper) expects.
- //
- // Output:
- // -samp_freq_hz : sampling frequency, in Hertz, which the encoder
- // should be fed with.
- //
- // Return value:
- // -1 if failed to output sampling rate.
- // 0 if the sample rate is returned successfully.
- //
- virtual int16_t EncoderSampFreq(uint16_t& samp_freq_hz);
-
- ///////////////////////////////////////////////////////////////////////////
- // int32_t ConfigISACBandwidthEstimator()
- // Call this function to configure the bandwidth estimator of ISAC.
- // During the adaptation of bit-rate, iSAC automatically adjusts the
- // frame-size (either 30 or 60 ms) to save on RTP header. The initial
- // frame-size can be specified by the first argument. The configuration also
- // regards the initial estimate of bandwidths. The estimator starts from
- // this point and converges to the actual bottleneck. This is given by the
- // second parameter. Furthermore, it is also possible to control the
- // adaptation of frame-size. This is specified by the last parameter.
- //
- // Input:
- // -init_frame_fize_ms : initial frame-size in milliseconds. For iSAC-wb
- // 30 ms and 60 ms (default) are acceptable values,
- // and for iSAC-swb 30 ms is the only acceptable
- // value. Zero indicates default value.
- // -init_rate_bps : initial estimate of the bandwidth. Values
- // between 10000 and 58000 are acceptable.
- // -enforce_frame_size : if true, the frame-size will not be adapted.
- //
- // Return value:
- // -1 if failed to configure the bandwidth estimator,
- // 0 if the configuration was successfully applied.
- //
- virtual int32_t ConfigISACBandwidthEstimator(
- const uint8_t init_frame_size_msec,
- const uint16_t init_rate_bps,
- const bool enforce_frame_size);
-
- ///////////////////////////////////////////////////////////////////////////
- // SetISACMaxPayloadSize()
- // Set the maximum payload size of iSAC packets. No iSAC payload,
- // regardless of its frame-size, may exceed the given limit. For
- // an iSAC payload of size B bits and frame-size T sec we have;
- // (B < max_payload_len_bytes * 8) and (B/T < max_rate_bit_per_sec), c.f.
- // SetISACMaxRate().
- //
- // Input:
- // -max_payload_len_bytes : maximum payload size in bytes.
- //
- // Return value:
- // -1 if failed to set the maximum payload-size.
- // 0 if the given length is set successfully.
- //
- virtual int32_t SetISACMaxPayloadSize(
- const uint16_t max_payload_len_bytes);
-
- ///////////////////////////////////////////////////////////////////////////
- // SetISACMaxRate()
- // Set the maximum instantaneous rate of iSAC. For a payload of B bits
- // with a frame-size of T sec the instantaneous rate is B/T bits per
- // second. Therefore, (B/T < max_rate_bit_per_sec) and
- // (B < max_payload_len_bytes * 8) are always satisfied for iSAC payloads,
- // c.f SetISACMaxPayloadSize().
- //
- // Input:
- // -max_rate_bps : maximum instantaneous bit-rate given in bits/sec.
- //
- // Return value:
- // -1 if failed to set the maximum rate.
- // 0 if the maximum rate is set successfully.
- //
- virtual int32_t SetISACMaxRate(const uint32_t max_rate_bps);
-
- ///////////////////////////////////////////////////////////////////////////
- // SaveDecoderParamS()
- // Save the parameters of decoder.
- //
- // Input:
- // -codec_params : pointer to a structure where the parameters of
- // decoder is stored in.
- //
- void SaveDecoderParam(const WebRtcACMCodecParams* codec_params);
-
- int32_t FrameSize() {
- return frame_len_smpl_;
- }
-
- void SetIsMaster(bool is_master);
-
- ///////////////////////////////////////////////////////////////////////////
- // REDPayloadISAC()
- // This is an iSAC-specific function. The function is called to get RED
- // payload from a default-encoder.
- //
- // Inputs:
- // -isac_rate : the target rate of the main payload. A RED
- // payload is generated according to the rate of
- // main payload. Note that we are not specifying the
- // rate of RED payload, but the main payload.
- // -isac_bw_estimate : bandwidth information should be inserted in
- // RED payload.
- //
- // Output:
- // -payload : pointer to a buffer where the RED payload will
- // written to.
- // -payload_len_bytes : a place-holder to write the length of the RED
- // payload in Bytes.
- //
- // Return value:
- // -1 if an error occurs, otherwise the length of the payload (in Bytes)
- // is returned.
- //
- virtual int16_t REDPayloadISAC(const int32_t isac_rate,
- const int16_t isac_bw_estimate,
- uint8_t* payload,
- int16_t* payload_len_bytes);
-
- ///////////////////////////////////////////////////////////////////////////
- // IsTrueStereoCodec()
- // Call to see if current encoder is a true stereo codec. This function
- // should be overwritten for codecs which are true stereo codecs
- // Return value:
- // -true if stereo codec
- // -false if not stereo codec.
- //
- virtual bool IsTrueStereoCodec();
-
- ///////////////////////////////////////////////////////////////////////////
- // HasFrameToEncode()
- // Returns true if there is enough audio buffered for encoding, such that
- // calling Encode() will return a payload.
- //
- bool HasFrameToEncode() const;
-
- protected:
- ///////////////////////////////////////////////////////////////////////////
- // All the functions with FunctionNameSafe(...) contain the actual
- // implementation of FunctionName(...). FunctionName() acquires an
- // appropriate lock and calls FunctionNameSafe() to do the actual work.
- // Therefore, for the description of functionality, input/output arguments
- // and return value we refer to FunctionName()
- //
-
- ///////////////////////////////////////////////////////////////////////////
- // See Decode() for the description of function, input(s)/output(s) and
- // return value.
- //
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // See Add10MsSafe() for the description of function, input(s)/output(s)
- // and return value.
- //
- virtual int32_t Add10MsDataSafe(const uint32_t timestamp,
- const int16_t* data,
- const uint16_t length,
- const uint8_t audio_channel);
-
- ///////////////////////////////////////////////////////////////////////////
- // See RegisterInNetEq() for the description of function,
- // input(s)/output(s) and return value.
- //
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // See EncoderParam() for the description of function, input(s)/output(s)
- // and return value.
- //
- int16_t EncoderParamsSafe(WebRtcACMCodecParams *enc_params);
-
- ///////////////////////////////////////////////////////////////////////////
- // See DecoderParam for the description of function, input(s)/output(s)
- // and return value.
- //
- // Note:
- // Any Class where a single instance handle several flavors of the
- // same codec, therefore, several payload types are associated with
- // the same instance have to implement this function.
- //
- // Currently only iSAC is implementing it. A single iSAC instance is
- // used for decoding both WB & SWB stream. At one moment both WB & SWB
- // can be registered as receive codec. Hence two payloads are associated
- // with a single codec instance.
- //
- virtual bool DecoderParamsSafe(WebRtcACMCodecParams *dec_params,
- const uint8_t payload_type);
-
- ///////////////////////////////////////////////////////////////////////////
- // See ResetEncoder() for the description of function, input(s)/output(s)
- // and return value.
- //
- int16_t ResetEncoderSafe();
-
- ///////////////////////////////////////////////////////////////////////////
- // See InitEncoder() for the description of function, input(s)/output(s)
- // and return value.
- //
- int16_t InitEncoderSafe(WebRtcACMCodecParams *codec_params,
- bool force_initialization);
-
- ///////////////////////////////////////////////////////////////////////////
- // See InitDecoder() for the description of function, input(s)/output(s)
- // and return value.
- //
- int16_t InitDecoderSafe(WebRtcACMCodecParams *codec_params,
- bool force_initialization);
-
- ///////////////////////////////////////////////////////////////////////////
- // See ResetDecoder() for the description of function, input(s)/output(s)
- // and return value.
- //
- int16_t ResetDecoderSafe(int16_t payload_type);
-
- ///////////////////////////////////////////////////////////////////////////
- // See DestructEncoder() for the description of function,
- // input(s)/output(s) and return value.
- //
- virtual void DestructEncoderSafe() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // See DestructDecoder() for the description of function,
- // input(s)/output(s) and return value.
- //
- virtual void DestructDecoderSafe() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // See SetBitRate() for the description of function, input(s)/output(s)
- // and return value.
- //
- // Any codec that can change the bit-rate has to implement this.
- //
- virtual int16_t SetBitRateSafe(const int32_t bitrate_bps);
-
- ///////////////////////////////////////////////////////////////////////////
- // See GetEstimatedBandwidth() for the description of function,
- // input(s)/output(s) and return value.
- //
- virtual int32_t GetEstimatedBandwidthSafe();
-
- ///////////////////////////////////////////////////////////////////////////
- // See SetEstimatedBandwidth() for the description of function,
- // input(s)/output(s) and return value.
- //
- virtual int32_t SetEstimatedBandwidthSafe(
- int32_t estimated_bandwidth);
-
- ///////////////////////////////////////////////////////////////////////////
- // See GetRedPayload() for the description of function, input(s)/output(s)
- // and return value.
- //
- virtual int32_t GetRedPayloadSafe(uint8_t* red_payload,
- int16_t* payload_bytes);
-
- ///////////////////////////////////////////////////////////////////////////
- // See SetVAD() for the description of function, input(s)/output(s) and
- // return value.
- //
- int16_t SetVADSafe(bool* enable_dtx,
- bool* enable_vad,
- ACMVADMode* mode);
-
- ///////////////////////////////////////////////////////////////////////////
- // See ReplaceInternalDTX() for the description of function, input and
- // return value.
- //
- virtual int32_t ReplaceInternalDTXSafe(const bool replace_internal_dtx);
-
- ///////////////////////////////////////////////////////////////////////////
- // See IsInternalDTXReplaced() for the description of function, input and
- // return value.
- //
- virtual int32_t IsInternalDTXReplacedSafe(bool* internal_dtx_replaced);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t CreateEncoder()
- // Creates the encoder instance.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t CreateEncoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t CreateDecoder()
- // Creates the decoder instance.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t CreateDecoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t EnableVAD();
- // Enables VAD with the given mode. The VAD instance will be created if
- // it does not exists.
- //
- // Input:
- // -mode : VAD mode c.f. audio_coding_module_typedefs.h for
- // the options.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t EnableVAD(ACMVADMode mode);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t DisableVAD()
- // Disables VAD.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t DisableVAD();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t EnableDTX()
- // Enables DTX. This method should be overwritten for codecs which have
- // internal DTX.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t EnableDTX();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t DisableDTX()
- // Disables usage of DTX. This method should be overwritten for codecs which
- // have internal DTX.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t DisableDTX();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalEncode()
- // This is a codec-specific function called in EncodeSafe() to actually
- // encode a frame of audio.
- //
- // Outputs:
- // -bitstream : pointer to a buffer where the bit-stream is
- // written to.
- // -bitstream_len_byte : the length of the bit-stream in bytes,
- // a negative value indicates error.
- //
- // Return value:
- // -1 if failed,
- // otherwise the length of the bit-stream is returned.
- //
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalInitEncoder()
- // This is a codec-specific function called in InitEncoderSafe(), it has to
- // do all codec-specific operation to initialize the encoder given the
- // encoder parameters.
- //
- // Input:
- // -codec_params : pointer to a structure that contains parameters to
- // initialize encoder.
- // Set codec_params->codec_inst.rate to -1 for
- // iSAC to operate in adaptive mode.
- // (to do: if frame-length is -1 frame-length will be
- // automatically adjusted, otherwise, given
- // frame-length is forced)
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams *codec_params) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalInitDecoder()
- // This is a codec-specific function called in InitDecoderSafe(), it has to
- // do all codec-specific operation to initialize the decoder given the
- // decoder parameters.
- //
- // Input:
- // -codec_params : pointer to a structure that contains parameters to
- // initialize encoder.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams *codec_params) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // void IncreaseNoMissedSamples()
- // This method is called to increase the number of samples that are
- // overwritten in the audio buffer.
- //
- // Input:
- // -num_samples : the number of overwritten samples is incremented
- // by this value.
- //
- void IncreaseNoMissedSamples(const int16_t num_samples);
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalCreateEncoder()
- // This is a codec-specific method called in CreateEncoderSafe() it is
- // supposed to perform all codec-specific operations to create encoder
- // instance.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t InternalCreateEncoder() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalCreateDecoder()
- // This is a codec-specific method called in CreateDecoderSafe() it is
- // supposed to perform all codec-specific operations to create decoder
- // instance.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t InternalCreateDecoder() = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // void InternalDestructEncoderInst()
- // This is a codec-specific method, used in conferencing, called from
- // DestructEncoderInst(). The input argument is pointer to encoder instance
- // (codec instance for codecs that encoder and decoder share the same
- // instance). This method is called to free the memory that |ptr_inst| is
- // pointing to.
- //
- // Input:
- // -ptr_inst : pointer to encoder instance.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual void InternalDestructEncoderInst(void* ptr_inst) = 0;
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t InternalResetEncoder()
- // This method is called to reset the states of encoder. However, the
- // current parameters, e.g. frame-length, should remain as they are. For
- // most of the codecs a re-initialization of the encoder is what needs to
- // be down. But for iSAC we like to keep the BWE history so we cannot
- // re-initialize. As soon as such an API is implemented in iSAC this method
- // has to be overwritten in ACMISAC class.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- virtual int16_t InternalResetEncoder();
-
- ///////////////////////////////////////////////////////////////////////////
- // int16_t ProcessFrameVADDTX()
- // This function is called when a full frame of audio is available. It will
- // break the audio frame into blocks such that each block could be processed
- // by VAD & CN/DTX. If a frame is divided into two blocks then there are two
- // cases. First, the first block is active, the second block will not be
- // processed by CN/DTX but only by VAD and return to caller with
- // '*samples_processed' set to zero. There, the audio frame will be encoded
- // by the encoder. Second, the first block is inactive and is processed by
- // CN/DTX, then we stop processing the next block and return to the caller
- // which is EncodeSafe(), with "*samples_processed" equal to the number of
- // samples in first block.
- //
- // Output:
- // -bitstream : pointer to a buffer where DTX frame, if
- // generated, will be written to.
- // -bitstream_len_byte : contains the length of bit-stream in bytes, if
- // generated. Zero if no bit-stream is generated.
- // -samples_processed : contains no of samples that actually CN has
- // processed. Those samples processed by CN will not
- // be encoded by the encoder, obviously. If
- // contains zero, it means that the frame has been
- // identified as active by VAD. Note that
- // "*samples_processed" might be non-zero but
- // "*bitstream_len_byte" be zero.
- //
- // Return value:
- // -1 if failed,
- // 0 if succeeded.
- //
- int16_t ProcessFrameVADDTX(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- int16_t* samples_processed);
-
- ///////////////////////////////////////////////////////////////////////////
- // CanChangeEncodingParam()
- // Check if the codec parameters can be changed. In conferencing normally
- // codec parameters cannot be changed. The exception is bit-rate of isac.
- //
- // return value:
- // -true if codec parameters are allowed to change.
- // -false otherwise.
- //
- virtual bool CanChangeEncodingParam(CodecInst& codec_inst);
-
- ///////////////////////////////////////////////////////////////////////////
- // CurrentRate()
- // Call to get the current encoding rate of the encoder. This function
- // should be overwritten for codecs which automatically change their
- // target rate. One example is iSAC. The output of the function is the
- // current target rate.
- //
- // Output:
- // -rate_bps : the current target rate of the codec.
- //
- virtual void CurrentRate(int32_t& /* rate_bps */);
-
- virtual void SaveDecoderParamSafe(const WebRtcACMCodecParams* codec_params);
-
- // &in_audio_[in_audio_ix_write_] always point to where new audio can be
- // written to
- int16_t in_audio_ix_write_;
-
- // &in_audio_[in_audio_ix_read_] points to where audio has to be read from
- int16_t in_audio_ix_read_;
-
- int16_t in_timestamp_ix_write_;
-
- // Where the audio is stored before encoding,
- // To save memory the following buffer can be allocated
- // dynamically for 80 ms depending on the sampling frequency
- // of the codec.
- int16_t* in_audio_;
- uint32_t* in_timestamp_;
-
- int16_t frame_len_smpl_;
- uint16_t num_channels_;
-
- // This will point to a static database of the supported codecs
- int16_t codec_id_;
-
- // This will account for the number of samples were not encoded
- // the case is rare, either samples are missed due to overwrite
- // at input buffer or due to encoding error
- uint32_t num_missed_samples_;
-
- // True if the encoder instance created
- bool encoder_exist_;
- bool decoder_exist_;
- // True if the encoder instance initialized
- bool encoder_initialized_;
- bool decoder_initialized_;
-
- bool registered_in_neteq_;
-
- // VAD/DTX
- bool has_internal_dtx_;
- WebRtcVadInst* ptr_vad_inst_;
- bool vad_enabled_;
- ACMVADMode vad_mode_;
- int16_t vad_label_[MAX_FRAME_SIZE_10MSEC];
- bool dtx_enabled_;
- WebRtcCngEncInst* ptr_dtx_inst_;
- uint8_t num_lpc_params_;
- bool sent_cn_previous_;
- bool is_master_;
- int16_t prev_frame_cng_;
-
- WebRtcACMCodecParams encoder_params_;
- WebRtcACMCodecParams decoder_params_;
-
- // Used as a global lock for all available decoders
- // so that no decoder is used when NetEQ decodes.
- RWLockWrapper* neteq_decode_lock_;
- // Used to lock wrapper internal data
- // such as buffers and state variables.
- RWLockWrapper& codec_wrapper_lock_;
-
- uint32_t last_encoded_timestamp_;
- uint32_t last_timestamp_;
- bool is_audio_buff_fresh_;
- uint32_t unique_id_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc
deleted file mode 100644
index 5ea0c56d9f2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc
+++ /dev/null
@@ -1,267 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_gsmfr.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#ifdef WEBRTC_CODEC_GSMFR
-// NOTE! GSM-FR is not included in the open-source package. Modify this file
-// or your codec API to match the function calls and names of used GSM-FR API
-// file.
-#include "gsmfr_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_GSMFR
-
-ACMGSMFR::ACMGSMFR(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- return;
-}
-
-ACMGSMFR::~ACMGSMFR() {
- return;
-}
-
-int16_t ACMGSMFR::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMGSMFR::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMGSMFR::EnableDTX() {
- return -1;
-}
-
-int16_t ACMGSMFR::DisableDTX() {
- return -1;
-}
-
-int16_t ACMGSMFR::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMGSMFR::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMGSMFR::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMGSMFR::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMGSMFR::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMGSMFR::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMGSMFR::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMGSMFR::DestructDecoderSafe() {
- return;
-}
-
-void ACMGSMFR::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-#else //===================== Actual Implementation =======================
-
-ACMGSMFR::ACMGSMFR(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- codec_id_ = codec_id;
- has_internal_dtx_ = true;
- return;
-}
-
-ACMGSMFR::~ACMGSMFR() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcGSMFR_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcGSMFR_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMGSMFR::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = WebRtcGSMFR_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- (int16_t*)bitstream);
- // increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer
- in_audio_ix_read_ += frame_len_smpl_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMGSMFR::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMGSMFR::EnableDTX() {
- if (dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) {
- if (WebRtcGSMFR_EncoderInit(encoder_inst_ptr_, 1) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "EnableDTX: cannot init encoder for GSMFR");
- return -1;
- }
- dtx_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-}
-
-int16_t ACMGSMFR::DisableDTX() {
- if (!dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) {
- if (WebRtcGSMFR_EncoderInit(encoder_inst_ptr_, 0) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "DisableDTX: cannot init encoder for GSMFR");
- return -1;
- }
- dtx_enabled_ = false;
- return 0;
- } else {
- // encoder doesn't exists, therefore disabling is harmless
- return 0;
- }
-}
-
-int16_t ACMGSMFR::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
- if (WebRtcGSMFR_EncoderInit(encoder_inst_ptr_,
- ((codec_params->enable_dtx) ? 1 : 0)) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitEncoder: cannot init encoder for GSMFR");
- }
- return 0;
-}
-
-int16_t ACMGSMFR::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- if (WebRtcGSMFR_DecoderInit(decoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: cannot init decoder for GSMFR");
- return -1;
- }
- return 0;
-}
-
-int32_t ACMGSMFR::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CodecDef: decoder is not initialized for GSMFR");
- return -1;
- }
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_GSMFR_FUNCTION."
- // Then call NetEQ to add the codec to it's
- // database.
- SET_CODEC_PAR((codec_def), kDecoderGSMFR, codec_inst.pltype,
- decoder_inst_ptr_, 8000);
- SET_GSMFR_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMGSMFR::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMGSMFR::InternalCreateEncoder() {
- if (WebRtcGSMFR_CreateEnc(&encoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: cannot create instance for GSMFR "
- "encoder");
- return -1;
- }
- return 0;
-}
-
-void ACMGSMFR::DestructEncoderSafe() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcGSMFR_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- encoder_exist_ = false;
- encoder_initialized_ = false;
-}
-
-int16_t ACMGSMFR::InternalCreateDecoder() {
- if (WebRtcGSMFR_CreateDec(&decoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateDecoder: cannot create instance for GSMFR "
- "decoder");
- return -1;
- }
- return 0;
-}
-
-void ACMGSMFR::DestructDecoderSafe() {
- if (decoder_inst_ptr_ != NULL) {
- WebRtcGSMFR_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- decoder_exist_ = false;
- decoder_initialized_ = false;
-}
-
-void ACMGSMFR::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcGSMFR_FreeEnc((GSMFR_encinst_t_*) ptr_inst);
- }
- return;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.h
deleted file mode 100644
index aa499734af9..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_gsmfr.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct GSMFR_encinst_t_;
-struct GSMFR_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMGSMFR : public ACMGenericCodec {
- public:
- explicit ACMGSMFR(int16_t codec_id);
- ~ACMGSMFR();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(void* ptr_inst);
-
- int16_t EnableDTX();
-
- int16_t DisableDTX();
-
- GSMFR_encinst_t_* encoder_inst_ptr_;
- GSMFR_decinst_t_* decoder_inst_ptr_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.cc
deleted file mode 100644
index 0f8049e8047..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.cc
+++ /dev/null
@@ -1,259 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#include "webrtc/modules/audio_coding/main/source/acm_ilbc.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_ILBC
-#include "webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_ILBC
-
-ACMILBC::ACMILBC(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- return;
-}
-
-ACMILBC::~ACMILBC() {
- return;
-}
-
-int16_t ACMILBC::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMILBC::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMILBC::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMILBC::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMILBC::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMILBC::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMILBC::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMILBC::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMILBC::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMILBC::DestructDecoderSafe() {
- return;
-}
-
-void ACMILBC::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMILBC::SetBitRateSafe(const int32_t /* rate */) {
- return -1;
-}
-
-#else //===================== Actual Implementation =======================
-
-ACMILBC::ACMILBC(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- codec_id_ = codec_id;
- return;
-}
-
-ACMILBC::~ACMILBC() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcIlbcfix_EncoderFree(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcIlbcfix_DecoderFree(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMILBC::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = WebRtcIlbcfix_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- (int16_t*)bitstream);
- if (*bitstream_len_byte < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: error in encode for ILBC");
- return -1;
- }
- // increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer
- in_audio_ix_read_ += frame_len_smpl_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMILBC::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMILBC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- // initialize with a correct processing block length
- if ((160 == (codec_params->codec_inst).pacsize) ||
- (320 == (codec_params->codec_inst).pacsize)) {
- // processing block of 20ms
- return WebRtcIlbcfix_EncoderInit(encoder_inst_ptr_, 20);
- } else if ((240 == (codec_params->codec_inst).pacsize) ||
- (480 == (codec_params->codec_inst).pacsize)) {
- // processing block of 30ms
- return WebRtcIlbcfix_EncoderInit(encoder_inst_ptr_, 30);
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitEncoder: invalid processing block");
- return -1;
- }
-}
-
-int16_t ACMILBC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
- // initialize with a correct processing block length
- if ((160 == (codec_params->codec_inst).pacsize) ||
- (320 == (codec_params->codec_inst).pacsize)) {
- // processing block of 20ms
- return WebRtcIlbcfix_DecoderInit(decoder_inst_ptr_, 20);
- } else if ((240 == (codec_params->codec_inst).pacsize) ||
- (480 == (codec_params->codec_inst).pacsize)) {
- // processing block of 30ms
- return WebRtcIlbcfix_DecoderInit(decoder_inst_ptr_, 30);
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalInitDecoder: invalid processing block");
- return -1;
- }
-}
-
-int32_t ACMILBC::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CodeDef: decoder not initialized for ILBC");
- return -1;
- }
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_ILBC_FUNCTION."
- // Then return the structure back to NetEQ to add the codec to it's
- // database.
- SET_CODEC_PAR((codec_def), kDecoderILBC, codec_inst.pltype, decoder_inst_ptr_,
- 8000);
- SET_ILBC_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMILBC::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMILBC::InternalCreateEncoder() {
- if (WebRtcIlbcfix_EncoderCreate(&encoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateEncoder: cannot create instance for ILBC "
- "encoder");
- return -1;
- }
- return 0;
-}
-
-void ACMILBC::DestructEncoderSafe() {
- encoder_initialized_ = false;
- encoder_exist_ = false;
- if (encoder_inst_ptr_ != NULL) {
- WebRtcIlbcfix_EncoderFree(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
-}
-
-int16_t ACMILBC::InternalCreateDecoder() {
- if (WebRtcIlbcfix_DecoderCreate(&decoder_inst_ptr_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalCreateDecoder: cannot create instance for ILBC "
- "decoder");
- return -1;
- }
- return 0;
-}
-
-void ACMILBC::DestructDecoderSafe() {
- decoder_initialized_ = false;
- decoder_exist_ = false;
- if (decoder_inst_ptr_ != NULL) {
- WebRtcIlbcfix_DecoderFree(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
-}
-
-void ACMILBC::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcIlbcfix_EncoderFree((iLBC_encinst_t_*) ptr_inst);
- }
- return;
-}
-
-int16_t ACMILBC::SetBitRateSafe(const int32_t rate) {
- // Check that rate is valid. No need to store the value
- if (rate == 13300) {
- WebRtcIlbcfix_EncoderInit(encoder_inst_ptr_, 30);
- } else if (rate == 15200) {
- WebRtcIlbcfix_EncoderInit(encoder_inst_ptr_, 20);
- } else {
- return -1;
- }
- encoder_params_.codec_inst.rate = rate;
-
- return 0;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.h
deleted file mode 100644
index bd2495fe316..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_ilbc.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct iLBC_encinst_t_;
-struct iLBC_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMILBC : public ACMGenericCodec {
- public:
- explicit ACMILBC(int16_t codec_id);
- virtual ~ACMILBC();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t rate) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- iLBC_encinst_t_* encoder_inst_ptr_;
- iLBC_decinst_t_* decoder_inst_ptr_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.cc
deleted file mode 100644
index 61fa32f6d6d..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.cc
+++ /dev/null
@@ -1,903 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-#include "webrtc/modules/audio_coding/main/source/acm_isac.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_ISAC
-#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
-#include "webrtc/modules/audio_coding/main/source/acm_isac_macros.h"
-#endif
-
-#ifdef WEBRTC_CODEC_ISACFX
-#include "webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h"
-#include "webrtc/modules/audio_coding/main/source/acm_isac_macros.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-// we need this otherwise we cannot use forward declaration
-// in the header file
-#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
-struct ACMISACInst {
- ACM_ISAC_STRUCT *inst;
-};
-#endif
-
-#define ISAC_MIN_RATE 10000
-#define ISAC_MAX_RATE 56000
-
-// Tables for bandwidth estimates
-#define NR_ISAC_BANDWIDTHS 24
-static const int32_t kIsacRatesWb[NR_ISAC_BANDWIDTHS] = {
- 10000, 11100, 12300, 13700, 15200, 16900,
- 18800, 20900, 23300, 25900, 28700, 31900,
- 10100, 11200, 12400, 13800, 15300, 17000,
- 18900, 21000, 23400, 26000, 28800, 32000
-};
-
-static const int32_t kIsacRatesSwb[NR_ISAC_BANDWIDTHS] = {
- 10000, 11000, 12400, 13800, 15300, 17000,
- 18900, 21000, 23200, 25400, 27600, 29800,
- 32000, 34100, 36300, 38500, 40700, 42900,
- 45100, 47300, 49500, 51700, 53900, 56000,
-};
-
-#if (!defined(WEBRTC_CODEC_ISAC) && !defined(WEBRTC_CODEC_ISACFX))
-
-ACMISAC::ACMISAC(int16_t /* codec_id */)
- : codec_inst_ptr_(NULL),
- is_enc_initialized_(false),
- isac_coding_mode_(CHANNEL_INDEPENDENT),
- enforce_frame_size_(false),
- isac_currentBN_(32000),
- samples_in10MsAudio_(160) { // Initiates to 16 kHz mode.
- // Initiate decoder parameters for the 32 kHz mode.
- memset(&decoder_params32kHz_, 0, sizeof(WebRtcACMCodecParams));
- decoder_params32kHz_.codec_inst.pltype = -1;
-
- return;
-}
-
-ACMISAC::~ACMISAC() {
- return;
-}
-
-ACMGenericCodec* ACMISAC::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMISAC::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMISAC::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMISAC::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMISAC::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMISAC::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMISAC::DestructDecoderSafe() {
- return;
-}
-
-int16_t ACMISAC::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMISAC::DestructEncoderSafe() {
- return;
-}
-
-int32_t ACMISAC::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-void ACMISAC::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMISAC::DeliverCachedIsacData(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */,
- uint32_t* /* timestamp */,
- WebRtcACMEncodingType* /* encoding_type */,
- const uint16_t /* isac_rate */,
- const uint8_t /* isac_bw_estimate */) {
- return -1;
-}
-
-int16_t ACMISAC::Transcode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */,
- int16_t /* q_bwe */,
- int32_t /* scale */,
- bool /* is_red */) {
- return -1;
-}
-
-int16_t ACMISAC::SetBitRateSafe(int32_t /* bit_rate */) {
- return -1;
-}
-
-int32_t ACMISAC::GetEstimatedBandwidthSafe() {
- return -1;
-}
-
-int32_t ACMISAC::SetEstimatedBandwidthSafe(
- int32_t /* estimated_bandwidth */) {
- return -1;
-}
-
-int32_t ACMISAC::GetRedPayloadSafe(uint8_t* /* red_payload */,
- int16_t* /* payload_bytes */) {
- return -1;
-}
-
-int16_t ACMISAC::UpdateDecoderSampFreq(int16_t /* codec_id */) {
- return -1;
-}
-
-int16_t ACMISAC::UpdateEncoderSampFreq(
- uint16_t /* encoder_samp_freq_hz */) {
- return -1;
-}
-
-int16_t ACMISAC::EncoderSampFreq(uint16_t& /* samp_freq_hz */) {
- return -1;
-}
-
-int32_t ACMISAC::ConfigISACBandwidthEstimator(
- const uint8_t /* init_frame_size_msec */,
- const uint16_t /* init_rate_bit_per_sec */,
- const bool /* enforce_frame_size */) {
- return -1;
-}
-
-int32_t ACMISAC::SetISACMaxPayloadSize(
- const uint16_t /* max_payload_len_bytes */) {
- return -1;
-}
-
-int32_t ACMISAC::SetISACMaxRate(
- const uint32_t /* max_rate_bit_per_sec */) {
- return -1;
-}
-
-void ACMISAC::UpdateFrameLen() {
- return;
-}
-
-void ACMISAC::CurrentRate(int32_t& /*rate_bit_per_sec */) {
- return;
-}
-
-bool
-ACMISAC::DecoderParamsSafe(
- WebRtcACMCodecParams* /* dec_params */,
- const uint8_t /* payload_type */) {
- return false;
-}
-
-void
-ACMISAC::SaveDecoderParamSafe(
- const WebRtcACMCodecParams* /* codec_params */) {
- return;
-}
-
-int16_t ACMISAC::REDPayloadISAC(
- const int32_t /* isac_rate */,
- const int16_t /* isac_bw_estimate */,
- uint8_t* /* payload */,
- int16_t* /* payload_len_bytes */) {
- return -1;
-}
-
-#else //===================== Actual Implementation =======================
-
-#ifdef WEBRTC_CODEC_ISACFX
-
-// How the scaling is computed. iSAC computes a gain based on the
-// bottleneck. It follows the following expression for that
-//
-// G(BN_kbps) = pow(10, (a + b * BN_kbps + c * BN_kbps * BN_kbps) / 20.0)
-// / 3.4641;
-//
-// Where for 30 ms framelength we have,
-//
-// a = -23; b = 0.48; c = 0;
-//
-// As the default encoder is operating at 32kbps we have the scale as
-//
-// S(BN_kbps) = G(BN_kbps) / G(32);
-
-#define ISAC_NUM_SUPPORTED_RATES 9
-
-static const uint16_t kIsacSuportedRates[ISAC_NUM_SUPPORTED_RATES] = {
- 32000, 30000, 26000, 23000, 21000,
- 19000, 17000, 15000, 12000
-};
-
-static const float kIsacScale[ISAC_NUM_SUPPORTED_RATES] = {
- 1.0f, 0.8954f, 0.7178f, 0.6081f, 0.5445f,
- 0.4875f, 0.4365f, 0.3908f, 0.3311f
-};
-
-enum IsacSamplingRate {
- kIsacWideband = 16,
- kIsacSuperWideband = 32
-};
-
-static float ACMISACFixTranscodingScale(uint16_t rate) {
- // find the scale for transcoding, the scale is rounded
- // downward
- float scale = -1;
- for (int16_t n = 0; n < ISAC_NUM_SUPPORTED_RATES; n++) {
- if (rate >= kIsacSuportedRates[n]) {
- scale = kIsacScale[n];
- break;
- }
- }
- return scale;
-}
-
-static void ACMISACFixGetSendBitrate(ACM_ISAC_STRUCT* inst,
- int32_t* bottleneck) {
- *bottleneck = WebRtcIsacfix_GetUplinkBw(inst);
-}
-
-static int16_t ACMISACFixGetNewBitstream(ACM_ISAC_STRUCT* inst,
- int16_t bwe_index,
- int16_t /* jitter_index */,
- int32_t rate,
- int16_t* bitstream,
- bool is_red) {
- if (is_red) {
- // RED not supported with iSACFIX
- return -1;
- }
- float scale = ACMISACFixTranscodingScale((uint16_t) rate);
- return WebRtcIsacfix_GetNewBitStream(inst, bwe_index, scale, bitstream);
-}
-
-static int16_t ACMISACFixGetSendBWE(ACM_ISAC_STRUCT* inst,
- int16_t* rate_index,
- int16_t* /* dummy */) {
- int16_t local_rate_index;
- int16_t status = WebRtcIsacfix_GetDownLinkBwIndex(inst,
- &local_rate_index);
- if (status < 0) {
- return -1;
- } else {
- *rate_index = local_rate_index;
- return 0;
- }
-}
-
-static int16_t ACMISACFixControlBWE(ACM_ISAC_STRUCT* inst,
- int32_t rate_bps,
- int16_t frame_size_ms,
- int16_t enforce_frame_size) {
- return WebRtcIsacfix_ControlBwe(inst, (int16_t) rate_bps, frame_size_ms,
- enforce_frame_size);
-}
-
-static int16_t ACMISACFixControl(ACM_ISAC_STRUCT* inst,
- int32_t rate_bps,
- int16_t frame_size_ms) {
- return WebRtcIsacfix_Control(inst, (int16_t) rate_bps, frame_size_ms);
-}
-
-// The following two function should have the same signature as their counter
-// part in iSAC floating-point, i.e. WebRtcIsac_EncSampRate &
-// WebRtcIsac_DecSampRate.
-static uint16_t ACMISACFixGetEncSampRate(ACM_ISAC_STRUCT* /* inst */) {
- return 16000;
-}
-
-static uint16_t ACMISACFixGetDecSampRate(ACM_ISAC_STRUCT* /* inst */) {
- return 16000;
-}
-
-#endif
-
-ACMISAC::ACMISAC(int16_t codec_id)
- : is_enc_initialized_(false),
- isac_coding_mode_(CHANNEL_INDEPENDENT),
- enforce_frame_size_(false),
- isac_current_bn_(32000),
- samples_in_10ms_audio_(160) { // Initiates to 16 kHz mode.
- codec_id_ = codec_id;
-
- // Create codec instance.
- codec_inst_ptr_ = new ACMISACInst;
- if (codec_inst_ptr_ == NULL) {
- return;
- }
- codec_inst_ptr_->inst = NULL;
-
- // Initiate decoder parameters for the 32 kHz mode.
- memset(&decoder_params_32khz_, 0, sizeof(WebRtcACMCodecParams));
- decoder_params_32khz_.codec_inst.pltype = -1;
-
- // TODO(tlegrand): Check if the following is really needed, now that
- // ACMGenericCodec has been updated to initialize this value.
- // Initialize values that can be used uninitialized otherwise
- decoder_params_.codec_inst.pltype = -1;
-}
-
-ACMISAC::~ACMISAC() {
- if (codec_inst_ptr_ != NULL) {
- if (codec_inst_ptr_->inst != NULL) {
- ACM_ISAC_FREE(codec_inst_ptr_->inst);
- codec_inst_ptr_->inst = NULL;
- }
- delete codec_inst_ptr_;
- codec_inst_ptr_ = NULL;
- }
- return;
-}
-
-ACMGenericCodec* ACMISAC::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMISAC::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- // ISAC takes 10ms audio everytime we call encoder, therefor,
- // it should be treated like codecs with 'basic coding block'
- // non-zero, and the following 'while-loop' should not be necessary.
- // However, due to a mistake in the codec the frame-size might change
- // at the first 10ms pushed in to iSAC if the bit-rate is low, this is
- // sort of a bug in iSAC. to address this we treat iSAC as the
- // following.
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
- *bitstream_len_byte = 0;
- while ((*bitstream_len_byte == 0) && (in_audio_ix_read_ < frame_len_smpl_)) {
- if (in_audio_ix_read_ > in_audio_ix_write_) {
- // something is wrong.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "The actual fram-size of iSAC appears to be larger that "
- "expected. All audio pushed in but no bit-stream is "
- "generated.");
- return -1;
- }
- *bitstream_len_byte = ACM_ISAC_ENCODE(codec_inst_ptr_->inst,
- &in_audio_[in_audio_ix_read_],
- (int16_t*)bitstream);
- // increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer
- in_audio_ix_read_ += samples_in_10ms_audio_;
- }
- if (*bitstream_len_byte == 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, unique_id_,
- "ISAC Has encoded the whole frame but no bit-stream is "
- "generated.");
- }
-
- // a packet is generated iSAC, is set in adaptive mode may change
- // the frame length and we like to update the bottleneck value as
- // well, although updating bottleneck is not crucial
- if ((*bitstream_len_byte > 0) && (isac_coding_mode_ == ADAPTIVE)) {
- ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, &isac_current_bn_);
- }
- UpdateFrameLen();
- return *bitstream_len_byte;
-}
-
-int16_t ACMISAC::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_sample */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMISAC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- // if rate is set to -1 then iSAC has to be in adaptive mode
- if (codec_params->codec_inst.rate == -1) {
- isac_coding_mode_ = ADAPTIVE;
- } else if ((codec_params->codec_inst.rate >= ISAC_MIN_RATE) &&
- (codec_params->codec_inst.rate <= ISAC_MAX_RATE)) {
- // sanity check that rate is in acceptable range
- isac_coding_mode_ = CHANNEL_INDEPENDENT;
- isac_current_bn_ = codec_params->codec_inst.rate;
- } else {
- return -1;
- }
-
- // we need to set the encoder sampling frequency.
- if (UpdateEncoderSampFreq((uint16_t) codec_params->codec_inst.plfreq)
- < 0) {
- return -1;
- }
- if (ACM_ISAC_ENCODERINIT(codec_inst_ptr_->inst, isac_coding_mode_) < 0) {
- return -1;
- }
-
- // apply the frame-size and rate if operating in
- // channel-independent mode
- if (isac_coding_mode_ == CHANNEL_INDEPENDENT) {
- if (ACM_ISAC_CONTROL(codec_inst_ptr_->inst,
- codec_params->codec_inst.rate,
- codec_params->codec_inst.pacsize /
- (codec_params->codec_inst.plfreq / 1000)) < 0) {
- return -1;
- }
- } else {
- // We need this for adaptive case and has to be called
- // after initialization
- ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, &isac_current_bn_);
- }
- frame_len_smpl_ = ACM_ISAC_GETNEWFRAMELEN(codec_inst_ptr_->inst);
- return 0;
-}
-
-int16_t ACMISAC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
-
- // set decoder sampling frequency.
- if (codec_params->codec_inst.plfreq == 32000 ||
- codec_params->codec_inst.plfreq == 48000) {
- UpdateDecoderSampFreq(ACMCodecDB::kISACSWB);
- } else {
- UpdateDecoderSampFreq(ACMCodecDB::kISAC);
- }
-
- // in a one-way communication we may never register send-codec.
- // However we like that the BWE to work properly so it has to
- // be initialized. The BWE is initialized when iSAC encoder is initialized.
- // Therefore, we need this.
- if (!encoder_initialized_) {
- // Since we don't require a valid rate or a valid packet size when
- // initializing the decoder, we set valid values before initializing encoder
- codec_params->codec_inst.rate = kIsacWbDefaultRate;
- codec_params->codec_inst.pacsize = kIsacPacSize960;
- if (InternalInitEncoder(codec_params) < 0) {
- return -1;
- }
- encoder_initialized_ = true;
- }
-
- return ACM_ISAC_DECODERINIT(codec_inst_ptr_->inst);
-}
-
-int16_t ACMISAC::InternalCreateDecoder() {
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
- int16_t status = ACM_ISAC_CREATE(&(codec_inst_ptr_->inst));
-
- // specific to codecs with one instance for encoding and decoding
- encoder_initialized_ = false;
- if (status < 0) {
- encoder_exist_ = false;
- } else {
- encoder_exist_ = true;
- }
- return status;
-}
-
-void ACMISAC::DestructDecoderSafe() {
- // codec with shared instance cannot delete.
- decoder_initialized_ = false;
- return;
-}
-
-int16_t ACMISAC::InternalCreateEncoder() {
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
- int16_t status = ACM_ISAC_CREATE(&(codec_inst_ptr_->inst));
-
- // specific to codecs with one instance for encoding and decoding
- decoder_initialized_ = false;
- if (status < 0) {
- decoder_exist_ = false;
- } else {
- decoder_exist_ = true;
- }
- return status;
-}
-
-void ACMISAC::DestructEncoderSafe() {
- // codec with shared instance cannot delete.
- encoder_initialized_ = false;
- return;
-}
-
-int32_t ACMISAC::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- // Sanity checks
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
- if (!decoder_initialized_ || !decoder_exist_) {
- return -1;
- }
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_ISAC_FUNCTION."
- // Then call NetEQ to add the codec to it's
- // database.
- if (codec_inst.plfreq == 16000) {
- SET_CODEC_PAR((codec_def), kDecoderISAC, codec_inst.pltype,
- codec_inst_ptr_->inst, 16000);
-#ifdef WEBRTC_CODEC_ISAC
- SET_ISAC_FUNCTIONS((codec_def));
-#else
- SET_ISACfix_FUNCTIONS((codec_def));
-#endif
- } else {
-#ifdef WEBRTC_CODEC_ISAC
- // Decoder is either @ 16 kHz or 32 kHz. Even if encoder is set @ 48 kHz
- // decoding is @ 32 kHz.
- if (codec_inst.plfreq == 32000) {
- SET_CODEC_PAR((codec_def), kDecoderISACswb, codec_inst.pltype,
- codec_inst_ptr_->inst, 32000);
- SET_ISACSWB_FUNCTIONS((codec_def));
- } else {
- SET_CODEC_PAR((codec_def), kDecoderISACfb, codec_inst.pltype,
- codec_inst_ptr_->inst, 32000);
- SET_ISACFB_FUNCTIONS((codec_def));
- }
-#else
- return -1;
-#endif
- }
- return 0;
-}
-
-void ACMISAC::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- ACM_ISAC_FREE((ACM_ISAC_STRUCT *) ptr_inst);
- }
- return;
-}
-
-int16_t ACMISAC::Transcode(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- int16_t q_bwe,
- int32_t rate,
- bool is_red) {
- int16_t jitter_info = 0;
- // transcode from a higher rate to lower rate sanity check
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
-
- *bitstream_len_byte = ACM_ISAC_GETNEWBITSTREAM(codec_inst_ptr_->inst, q_bwe,
- jitter_info, rate,
- (int16_t*)bitstream,
- (is_red) ? 1 : 0);
-
- if (*bitstream_len_byte < 0) {
- // error happened
- *bitstream_len_byte = 0;
- return -1;
- } else {
- return *bitstream_len_byte;
- }
-}
-
-int16_t ACMISAC::SetBitRateSafe(int32_t bit_rate) {
- if (codec_inst_ptr_ == NULL) {
- return -1;
- }
- uint16_t encoder_samp_freq;
- EncoderSampFreq(encoder_samp_freq);
- bool reinit = false;
- // change the BN of iSAC
- if (bit_rate == -1) {
- // ADAPTIVE MODE
- // Check if it was already in adaptive mode
- if (isac_coding_mode_ != ADAPTIVE) {
- // was not in adaptive, then set the mode to adaptive
- // and flag for re-initialization
- isac_coding_mode_ = ADAPTIVE;
- reinit = true;
- }
- } else if ((bit_rate >= ISAC_MIN_RATE) && (bit_rate <= ISAC_MAX_RATE)) {
- // Sanity check if the rate valid
- // check if it was in channel-independent mode before
- if (isac_coding_mode_ != CHANNEL_INDEPENDENT) {
- // was not in channel independent, set the mode to
- // channel-independent and flag for re-initialization
- isac_coding_mode_ = CHANNEL_INDEPENDENT;
- reinit = true;
- }
- // store the bottleneck
- isac_current_bn_ = (uint16_t) bit_rate;
- } else {
- // invlaid rate
- return -1;
- }
-
- int16_t status = 0;
- if (reinit) {
- // initialize and check if it is successful
- if (ACM_ISAC_ENCODERINIT(codec_inst_ptr_->inst, isac_coding_mode_) < 0) {
- // failed initialization
- return -1;
- }
- }
- if (isac_coding_mode_ == CHANNEL_INDEPENDENT) {
- status = ACM_ISAC_CONTROL(
- codec_inst_ptr_->inst, isac_current_bn_,
- (encoder_samp_freq == 32000 || encoder_samp_freq == 48000) ? 30 :
- (frame_len_smpl_ / 16));
- if (status < 0) {
- status = -1;
- }
- }
-
- // Update encoder parameters
- encoder_params_.codec_inst.rate = bit_rate;
-
- UpdateFrameLen();
- return status;
-}
-
-int32_t ACMISAC::GetEstimatedBandwidthSafe() {
- int16_t bandwidth_index = 0;
- int16_t delay_index = 0;
- int samp_rate;
-
- // Get bandwidth information
- ACM_ISAC_GETSENDBWE(codec_inst_ptr_->inst, &bandwidth_index, &delay_index);
-
- // Validy check of index
- if ((bandwidth_index < 0) || (bandwidth_index >= NR_ISAC_BANDWIDTHS)) {
- return -1;
- }
-
- // Check sample frequency
- samp_rate = ACM_ISAC_GETDECSAMPRATE(codec_inst_ptr_->inst);
- if (samp_rate == 16000) {
- return kIsacRatesWb[bandwidth_index];
- } else {
- return kIsacRatesSwb[bandwidth_index];
- }
-}
-
-int32_t ACMISAC::SetEstimatedBandwidthSafe(
- int32_t estimated_bandwidth) {
- int samp_rate;
- int16_t bandwidth_index;
-
- // Check sample frequency and choose appropriate table
- samp_rate = ACM_ISAC_GETENCSAMPRATE(codec_inst_ptr_->inst);
-
- if (samp_rate == 16000) {
- // Search through the WB rate table to find the index
- bandwidth_index = NR_ISAC_BANDWIDTHS / 2 - 1;
- for (int i = 0; i < (NR_ISAC_BANDWIDTHS / 2); i++) {
- if (estimated_bandwidth == kIsacRatesWb[i]) {
- bandwidth_index = i;
- break;
- } else if (estimated_bandwidth
- == kIsacRatesWb[i + NR_ISAC_BANDWIDTHS / 2]) {
- bandwidth_index = i + NR_ISAC_BANDWIDTHS / 2;
- break;
- } else if (estimated_bandwidth < kIsacRatesWb[i]) {
- bandwidth_index = i;
- break;
- }
- }
- } else {
- // Search through the SWB rate table to find the index
- bandwidth_index = NR_ISAC_BANDWIDTHS - 1;
- for (int i = 0; i < NR_ISAC_BANDWIDTHS; i++) {
- if (estimated_bandwidth <= kIsacRatesSwb[i]) {
- bandwidth_index = i;
- break;
- }
- }
- }
-
- // Set iSAC Bandwidth Estimate
- ACM_ISAC_SETBWE(codec_inst_ptr_->inst, bandwidth_index);
-
- return 0;
-}
-
-int32_t ACMISAC::GetRedPayloadSafe(
-#if (!defined(WEBRTC_CODEC_ISAC))
- uint8_t* /* red_payload */, int16_t* /* payload_bytes */) {
- return -1;
-#else
- uint8_t* red_payload, int16_t* payload_bytes) {
- int16_t bytes = WebRtcIsac_GetRedPayload(codec_inst_ptr_->inst,
- (int16_t*)red_payload);
- if (bytes < 0) {
- return -1;
- }
- *payload_bytes = bytes;
- return 0;
-#endif
-}
-
-int16_t ACMISAC::UpdateDecoderSampFreq(
-#ifdef WEBRTC_CODEC_ISAC
- int16_t codec_id) {
- // The decoder supports only wideband and super-wideband.
- if (ACMCodecDB::kISAC == codec_id) {
- return WebRtcIsac_SetDecSampRate(codec_inst_ptr_->inst, 16000);
- } else if (ACMCodecDB::kISACSWB == codec_id ||
- ACMCodecDB::kISACFB == codec_id) {
- return WebRtcIsac_SetDecSampRate(codec_inst_ptr_->inst, 32000);
- } else {
- return -1;
- }
-#else
- int16_t /* codec_id */) {
- return 0;
-#endif
-}
-
-int16_t ACMISAC::UpdateEncoderSampFreq(
-#ifdef WEBRTC_CODEC_ISAC
- uint16_t encoder_samp_freq_hz) {
- uint16_t current_samp_rate_hz;
- EncoderSampFreq(current_samp_rate_hz);
-
- if (current_samp_rate_hz != encoder_samp_freq_hz) {
- if ((encoder_samp_freq_hz != 16000) &&
- (encoder_samp_freq_hz != 32000) &&
- (encoder_samp_freq_hz != 48000)) {
- return -1;
- } else {
- in_audio_ix_read_ = 0;
- in_audio_ix_write_ = 0;
- in_timestamp_ix_write_ = 0;
- if (WebRtcIsac_SetEncSampRate(codec_inst_ptr_->inst,
- encoder_samp_freq_hz) < 0) {
- return -1;
- }
- samples_in_10ms_audio_ = encoder_samp_freq_hz / 100;
- frame_len_smpl_ = ACM_ISAC_GETNEWFRAMELEN(codec_inst_ptr_->inst);
- encoder_params_.codec_inst.pacsize = frame_len_smpl_;
- encoder_params_.codec_inst.plfreq = encoder_samp_freq_hz;
- return 0;
- }
- }
-#else
- uint16_t /* codec_id */) {
-#endif
- return 0;
-}
-
-int16_t ACMISAC::EncoderSampFreq(uint16_t& samp_freq_hz) {
- samp_freq_hz = ACM_ISAC_GETENCSAMPRATE(codec_inst_ptr_->inst);
- return 0;
-}
-
-int32_t ACMISAC::ConfigISACBandwidthEstimator(
- const uint8_t init_frame_size_msec,
- const uint16_t init_rate_bit_per_sec,
- const bool enforce_frame_size) {
- int16_t status;
- {
- uint16_t samp_freq_hz;
- EncoderSampFreq(samp_freq_hz);
- // TODO(turajs): at 32kHz we hardcode calling with 30ms and enforce
- // the frame-size otherwise we might get error. Revise if
- // control-bwe is changed.
- if (samp_freq_hz == 32000 || samp_freq_hz == 48000) {
- status = ACM_ISAC_CONTROL_BWE(codec_inst_ptr_->inst,
- init_rate_bit_per_sec, 30, 1);
- } else {
- status = ACM_ISAC_CONTROL_BWE(codec_inst_ptr_->inst,
- init_rate_bit_per_sec,
- init_frame_size_msec,
- enforce_frame_size ? 1 : 0);
- }
- }
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Couldn't config iSAC BWE.");
- return -1;
- }
- UpdateFrameLen();
- ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, &isac_current_bn_);
- return 0;
-}
-
-int32_t ACMISAC::SetISACMaxPayloadSize(
- const uint16_t max_payload_len_bytes) {
- return ACM_ISAC_SETMAXPAYLOADSIZE(codec_inst_ptr_->inst,
- max_payload_len_bytes);
-}
-
-int32_t ACMISAC::SetISACMaxRate(
- const uint32_t max_rate_bit_per_sec) {
- return ACM_ISAC_SETMAXRATE(codec_inst_ptr_->inst, max_rate_bit_per_sec);
-}
-
-void ACMISAC::UpdateFrameLen() {
- frame_len_smpl_ = ACM_ISAC_GETNEWFRAMELEN(codec_inst_ptr_->inst);
- encoder_params_.codec_inst.pacsize = frame_len_smpl_;
-}
-
-void ACMISAC::CurrentRate(int32_t& rate_bit_per_sec) {
- if (isac_coding_mode_ == ADAPTIVE) {
- ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, &rate_bit_per_sec);
- }
-}
-
-bool ACMISAC::DecoderParamsSafe(WebRtcACMCodecParams* dec_params,
- const uint8_t payload_type) {
- if (decoder_initialized_) {
- if (payload_type == decoder_params_.codec_inst.pltype) {
- memcpy(dec_params, &decoder_params_, sizeof(WebRtcACMCodecParams));
- return true;
- }
- if (payload_type == decoder_params_32khz_.codec_inst.pltype) {
- memcpy(dec_params, &decoder_params_32khz_, sizeof(WebRtcACMCodecParams));
- return true;
- }
- }
- return false;
-}
-
-void ACMISAC::SaveDecoderParamSafe(const WebRtcACMCodecParams* codec_params) {
- // set decoder sampling frequency.
- if (codec_params->codec_inst.plfreq == 32000 ||
- codec_params->codec_inst.plfreq == 48000) {
- memcpy(&decoder_params_32khz_, codec_params, sizeof(WebRtcACMCodecParams));
- } else {
- memcpy(&decoder_params_, codec_params, sizeof(WebRtcACMCodecParams));
- }
-}
-
-int16_t ACMISAC::REDPayloadISAC(const int32_t isac_rate,
- const int16_t isac_bw_estimate,
- uint8_t* payload,
- int16_t* payload_len_bytes) {
- int16_t status;
- ReadLockScoped rl(codec_wrapper_lock_);
- status = Transcode(payload, payload_len_bytes, isac_bw_estimate, isac_rate,
- true);
- return status;
-}
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.h
deleted file mode 100644
index 20b6c5391be..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac.h
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-struct ACMISACInst;
-
-enum IsacCodingMode {
- ADAPTIVE,
- CHANNEL_INDEPENDENT
-};
-
-class ACMISAC : public ACMGenericCodec {
- public:
- explicit ACMISAC(int16_t codec_id);
- virtual ~ACMISAC();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- int16_t DeliverCachedIsacData(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- uint32_t* timestamp,
- WebRtcACMEncodingType* encoding_type,
- const uint16_t isac_rate,
- const uint8_t isac_bwestimate);
-
- int16_t DeliverCachedData(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */,
- uint32_t* /* timestamp */,
- WebRtcACMEncodingType* /* encoding_type */) {
- return -1;
- }
-
- virtual int16_t UpdateDecoderSampFreq(int16_t codec_id) OVERRIDE;
-
- virtual int16_t UpdateEncoderSampFreq(uint16_t samp_freq_hz) OVERRIDE;
-
- virtual int16_t EncoderSampFreq(uint16_t& samp_freq_hz) OVERRIDE;
-
- virtual int32_t ConfigISACBandwidthEstimator(
- const uint8_t init_frame_size_msec,
- const uint16_t init_rate_bit_per_sec,
- const bool enforce_frame_size) OVERRIDE;
-
- virtual int32_t SetISACMaxPayloadSize(
- const uint16_t max_payload_len_bytes) OVERRIDE;
-
- virtual int32_t SetISACMaxRate(const uint32_t max_rate_bit_per_sec) OVERRIDE;
-
- virtual int16_t REDPayloadISAC(const int32_t isac_rate,
- const int16_t isac_bw_estimate,
- uint8_t* payload,
- int16_t* payload_len_bytes) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t bit_rate) OVERRIDE;
-
- virtual int32_t GetEstimatedBandwidthSafe() OVERRIDE;
-
- virtual int32_t SetEstimatedBandwidthSafe(
- int32_t estimated_bandwidth) OVERRIDE;
-
- virtual int32_t GetRedPayloadSafe(uint8_t* red_payload,
- int16_t* payload_bytes) OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- int16_t Transcode(uint8_t* bitstream,
- int16_t* bitstream_len_byte,
- int16_t q_bwe,
- int32_t rate,
- bool is_red);
-
- virtual void CurrentRate(int32_t& rate_bit_per_sec) OVERRIDE;
-
- void UpdateFrameLen();
-
- virtual bool DecoderParamsSafe(WebRtcACMCodecParams* dec_params,
- const uint8_t payload_type) OVERRIDE;
-
- virtual void SaveDecoderParamSafe(
- const WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- ACMISACInst* codec_inst_ptr_;
- bool is_enc_initialized_;
- IsacCodingMode isac_coding_mode_;
- bool enforce_frame_size_;
- int32_t isac_current_bn_;
- uint16_t samples_in_10ms_audio_;
- WebRtcACMCodecParams decoder_params_32khz_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac_macros.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac_macros.h
deleted file mode 100644
index 01e1e44b3e6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_isac_macros.h
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
-
-#include "webrtc/engine_configurations.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifdef WEBRTC_CODEC_ISAC
-#define ACM_ISAC_CREATE WebRtcIsac_Create
-#define ACM_ISAC_FREE WebRtcIsac_Free
-#define ACM_ISAC_ENCODERINIT WebRtcIsac_EncoderInit
-#define ACM_ISAC_ENCODE WebRtcIsac_Encode
-#define ACM_ISAC_DECODERINIT WebRtcIsac_DecoderInit
-#define ACM_ISAC_DECODE_BWE WebRtcIsac_UpdateBwEstimate
-#define ACM_ISAC_DECODE_B WebRtcIsac_Decode
-#define ACM_ISAC_DECODEPLC WebRtcIsac_DecodePlc
-#define ACM_ISAC_CONTROL WebRtcIsac_Control
-#define ACM_ISAC_CONTROL_BWE WebRtcIsac_ControlBwe
-#define ACM_ISAC_GETFRAMELEN WebRtcIsac_ReadFrameLen
-#define ACM_ISAC_GETERRORCODE WebRtcIsac_GetErrorCode
-#define ACM_ISAC_GETSENDBITRATE WebRtcIsac_GetUplinkBw
-#define ACM_ISAC_SETMAXPAYLOADSIZE WebRtcIsac_SetMaxPayloadSize
-#define ACM_ISAC_SETMAXRATE WebRtcIsac_SetMaxRate
-#define ACM_ISAC_GETNEWBITSTREAM WebRtcIsac_GetNewBitStream
-#define ACM_ISAC_GETSENDBWE WebRtcIsac_GetDownLinkBwIndex
-#define ACM_ISAC_SETBWE WebRtcIsac_UpdateUplinkBw
-#define ACM_ISAC_GETBWE WebRtcIsac_ReadBwIndex
-#define ACM_ISAC_GETNEWFRAMELEN WebRtcIsac_GetNewFrameLen
-#define ACM_ISAC_STRUCT ISACStruct
-#define ACM_ISAC_GETENCSAMPRATE WebRtcIsac_EncSampRate
-#define ACM_ISAC_GETDECSAMPRATE WebRtcIsac_DecSampRate
-#endif
-
-#ifdef WEBRTC_CODEC_ISACFX
-#define ACM_ISAC_CREATE WebRtcIsacfix_Create
-#define ACM_ISAC_FREE WebRtcIsacfix_Free
-#define ACM_ISAC_ENCODERINIT WebRtcIsacfix_EncoderInit
-#define ACM_ISAC_ENCODE WebRtcIsacfix_Encode
-#define ACM_ISAC_DECODERINIT WebRtcIsacfix_DecoderInit
-#define ACM_ISAC_DECODE_BWE WebRtcIsacfix_UpdateBwEstimate
-#define ACM_ISAC_DECODE_B WebRtcIsacfix_Decode
-#define ACM_ISAC_DECODEPLC WebRtcIsacfix_DecodePlc
-#define ACM_ISAC_CONTROL ACMISACFixControl // local Impl
-#define ACM_ISAC_CONTROL_BWE ACMISACFixControlBWE // local Impl
-#define ACM_ISAC_GETFRAMELEN WebRtcIsacfix_ReadFrameLen
-#define ACM_ISAC_GETERRORCODE WebRtcIsacfix_GetErrorCode
-#define ACM_ISAC_GETSENDBITRATE ACMISACFixGetSendBitrate // local Impl
-#define ACM_ISAC_SETMAXPAYLOADSIZE WebRtcIsacfix_SetMaxPayloadSize
-#define ACM_ISAC_SETMAXRATE WebRtcIsacfix_SetMaxRate
-#define ACM_ISAC_GETNEWBITSTREAM ACMISACFixGetNewBitstream // local Impl
-#define ACM_ISAC_GETSENDBWE ACMISACFixGetSendBWE // local Impl
-#define ACM_ISAC_SETBWE WebRtcIsacfix_UpdateUplinkBw
-#define ACM_ISAC_GETBWE WebRtcIsacfix_ReadBwIndex
-#define ACM_ISAC_GETNEWFRAMELEN WebRtcIsacfix_GetNewFrameLen
-#define ACM_ISAC_STRUCT ISACFIX_MainStruct
-#define ACM_ISAC_GETENCSAMPRATE ACMISACFixGetEncSampRate // local Impl
-#define ACM_ISAC_GETDECSAMPRATE ACMISACFixGetDecSampRate // local Impl
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.cc
deleted file mode 100644
index 154cc54d004..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.cc
+++ /dev/null
@@ -1,1151 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-
-#include <stdlib.h> // malloc
-
-#include <algorithm> // sort
-#include <vector>
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h"
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/system_wrappers/interface/trace_event.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-#define RTP_HEADER_SIZE 12
-#define NETEQ_INIT_FREQ 8000
-#define NETEQ_INIT_FREQ_KHZ (NETEQ_INIT_FREQ/1000)
-#define NETEQ_ERR_MSG_LEN_BYTE (WEBRTC_NETEQ_MAX_ERROR_NAME + 1)
-
-ACMNetEQ::ACMNetEQ()
- : id_(0),
- current_samp_freq_khz_(NETEQ_INIT_FREQ_KHZ),
- avt_playout_(false),
- playout_mode_(voice),
- neteq_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vad_status_(false),
- vad_mode_(VADNormal),
- decode_lock_(RWLockWrapper::CreateRWLock()),
- num_slaves_(0),
- received_stereo_(false),
- master_slave_info_(NULL),
- previous_audio_activity_(AudioFrame::kVadUnknown),
- callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- min_of_max_num_packets_(0),
- min_of_buffer_size_bytes_(0),
- per_packet_overhead_bytes_(0),
- av_sync_(false),
- minimum_delay_ms_(0),
- maximum_delay_ms_(0) {
- for (int n = 0; n < MAX_NUM_SLAVE_NETEQ + 1; n++) {
- is_initialized_[n] = false;
- ptr_vadinst_[n] = NULL;
- inst_[n] = NULL;
- inst_mem_[n] = NULL;
- neteq_packet_buffer_[n] = NULL;
- }
-}
-
-ACMNetEQ::~ACMNetEQ() {
- {
- CriticalSectionScoped lock(neteq_crit_sect_);
- RemoveNetEQSafe(0); // Master.
- RemoveSlavesSafe();
- }
- if (neteq_crit_sect_ != NULL) {
- delete neteq_crit_sect_;
- }
-
- if (decode_lock_ != NULL) {
- delete decode_lock_;
- }
-
- if (callback_crit_sect_ != NULL) {
- delete callback_crit_sect_;
- }
-}
-
-int32_t ACMNetEQ::Init() {
- CriticalSectionScoped lock(neteq_crit_sect_);
-
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (InitByIdxSafe(idx) < 0) {
- return -1;
- }
- // delete VAD instance and start fresh if required.
- if (ptr_vadinst_[idx] != NULL) {
- WebRtcVad_Free(ptr_vadinst_[idx]);
- ptr_vadinst_[idx] = NULL;
- }
- if (vad_status_) {
- // Has to enable VAD
- if (EnableVADByIdxSafe(idx) < 0) {
- // Failed to enable VAD.
- // Delete VAD instance, if it is created
- if (ptr_vadinst_[idx] != NULL) {
- WebRtcVad_Free(ptr_vadinst_[idx]);
- ptr_vadinst_[idx] = NULL;
- }
- // We are at initialization of NetEq, if failed to
- // enable VAD, we delete the NetEq instance.
- if (inst_mem_[idx] != NULL) {
- free(inst_mem_[idx]);
- inst_mem_[idx] = NULL;
- inst_[idx] = NULL;
- }
- is_initialized_[idx] = false;
- return -1;
- }
- }
- is_initialized_[idx] = true;
- }
- if (EnableVAD() == -1) {
- return -1;
- }
- return 0;
-}
-
-int16_t ACMNetEQ::InitByIdxSafe(const int16_t idx) {
- int memory_size_bytes;
- if (WebRtcNetEQ_AssignSize(&memory_size_bytes) != 0) {
- LogError("AssignSize", idx);
- return -1;
- }
-
- if (inst_mem_[idx] != NULL) {
- free(inst_mem_[idx]);
- inst_mem_[idx] = NULL;
- inst_[idx] = NULL;
- }
- inst_mem_[idx] = malloc(memory_size_bytes);
- if (inst_mem_[idx] == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "InitByIdxSafe: NetEq Initialization error: could not "
- "allocate memory for NetEq");
- is_initialized_[idx] = false;
- return -1;
- }
- if (WebRtcNetEQ_Assign(&inst_[idx], inst_mem_[idx]) != 0) {
- if (inst_mem_[idx] != NULL) {
- free(inst_mem_[idx]);
- inst_mem_[idx] = NULL;
- inst_[idx] = NULL;
- }
- LogError("Assign", idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "InitByIdxSafe: NetEq Initialization error: could not Assign");
- is_initialized_[idx] = false;
- return -1;
- }
- if (WebRtcNetEQ_Init(inst_[idx], NETEQ_INIT_FREQ) != 0) {
- if (inst_mem_[idx] != NULL) {
- free(inst_mem_[idx]);
- inst_mem_[idx] = NULL;
- inst_[idx] = NULL;
- }
- LogError("Init", idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "InitByIdxSafe: NetEq Initialization error: could not "
- "initialize NetEq");
- is_initialized_[idx] = false;
- return -1;
- }
- is_initialized_[idx] = true;
- return 0;
-}
-
-int16_t ACMNetEQ::EnableVADByIdxSafe(const int16_t idx) {
- if (ptr_vadinst_[idx] == NULL) {
- if (WebRtcVad_Create(&ptr_vadinst_[idx]) < 0) {
- ptr_vadinst_[idx] = NULL;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "EnableVADByIdxSafe: NetEq Initialization error: could not "
- "create VAD");
- return -1;
- }
- }
-
- if (WebRtcNetEQ_SetVADInstance(
- inst_[idx], ptr_vadinst_[idx],
- (WebRtcNetEQ_VADInitFunction) WebRtcVad_Init,
- (WebRtcNetEQ_VADSetmodeFunction) WebRtcVad_set_mode,
- (WebRtcNetEQ_VADFunction) WebRtcVad_Process) < 0) {
- LogError("setVADinstance", idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "EnableVADByIdxSafe: NetEq Initialization error: could not "
- "set VAD instance");
- return -1;
- }
-
- if (WebRtcNetEQ_SetVADMode(inst_[idx], vad_mode_) < 0) {
- LogError("setVADmode", idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "EnableVADByIdxSafe: NetEq Initialization error: could not "
- "set VAD mode");
- return -1;
- }
- return 0;
-}
-
-int32_t ACMNetEQ::AllocatePacketBuffer(
- const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs) {
- // Due to WebRtcNetEQ_GetRecommendedBufferSize
- // the following has to be int otherwise we will have compiler error
- // if not casted
-
- CriticalSectionScoped lock(neteq_crit_sect_);
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (AllocatePacketBufferByIdxSafe(used_codecs, num_codecs, idx) < 0) {
- return -1;
- }
- }
- return 0;
-}
-
-int16_t ACMNetEQ::AllocatePacketBufferByIdxSafe(
- const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs,
- const int16_t idx) {
- int max_num_packets;
- int buffer_size_in_bytes;
- int per_packet_overhead_bytes;
-
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AllocatePacketBufferByIdxSafe: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_GetRecommendedBufferSize(inst_[idx], used_codecs,
- num_codecs,
- kTCPXLargeJitter,
- &max_num_packets,
- &buffer_size_in_bytes,
- &per_packet_overhead_bytes) != 0) {
- LogError("GetRecommendedBufferSize", idx);
- return -1;
- }
- if (idx == 0) {
- min_of_buffer_size_bytes_ = buffer_size_in_bytes;
- min_of_max_num_packets_ = max_num_packets;
- per_packet_overhead_bytes_ = per_packet_overhead_bytes;
- } else {
- min_of_buffer_size_bytes_ = std::min(min_of_buffer_size_bytes_,
- buffer_size_in_bytes);
- min_of_max_num_packets_ = std::min(min_of_max_num_packets_,
- max_num_packets);
- }
- if (neteq_packet_buffer_[idx] != NULL) {
- free(neteq_packet_buffer_[idx]);
- neteq_packet_buffer_[idx] = NULL;
- }
-
- neteq_packet_buffer_[idx] = (int16_t *) malloc(buffer_size_in_bytes);
- if (neteq_packet_buffer_[idx] == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AllocatePacketBufferByIdxSafe: NetEq Initialization error: "
- "could not allocate memory for NetEq Packet Buffer");
- return -1;
- }
- if (WebRtcNetEQ_AssignBuffer(inst_[idx], max_num_packets,
- neteq_packet_buffer_[idx],
- buffer_size_in_bytes) != 0) {
- if (neteq_packet_buffer_[idx] != NULL) {
- free(neteq_packet_buffer_[idx]);
- neteq_packet_buffer_[idx] = NULL;
- }
- LogError("AssignBuffer", idx);
- return -1;
- }
- return 0;
-}
-
-int32_t ACMNetEQ::SetAVTPlayout(const bool enable) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (avt_playout_ != enable) {
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetAVTPlayout: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_SetAVTPlayout(inst_[idx], (enable) ? 1 : 0) < 0) {
- LogError("SetAVTPlayout", idx);
- return -1;
- }
- }
- }
- avt_playout_ = enable;
- return 0;
-}
-
-bool ACMNetEQ::avt_playout() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- return avt_playout_;
-}
-
-int32_t ACMNetEQ::CurrentSampFreqHz() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "CurrentSampFreqHz: NetEq is not initialized.");
- return -1;
- }
- return (int32_t)(1000 * current_samp_freq_khz_);
-}
-
-int32_t ACMNetEQ::SetPlayoutMode(const AudioPlayoutMode mode) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (playout_mode_ == mode)
- return 0;
-
- enum WebRtcNetEQPlayoutMode playout_mode = kPlayoutOff;
- enum WebRtcNetEQBGNMode background_noise_mode = kBGNOn;
- switch (mode) {
- case voice:
- playout_mode = kPlayoutOn;
- background_noise_mode = kBGNOn;
- break;
- case fax:
- playout_mode = kPlayoutFax;
- WebRtcNetEQ_GetBGNMode(inst_[0], &background_noise_mode); // No change.
- break;
- case streaming:
- playout_mode = kPlayoutStreaming;
- background_noise_mode = kBGNOff;
- break;
- case off:
- playout_mode = kPlayoutOff;
- background_noise_mode = kBGNOff;
- break;
- }
-
- int err = 0;
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetPlayoutMode: NetEq is not initialized.");
- return -1;
- }
-
- if (WebRtcNetEQ_SetPlayoutMode(inst_[idx], playout_mode) < 0) {
- LogError("SetPlayoutMode", idx);
- err = -1;
- }
-
- if (WebRtcNetEQ_SetBGNMode(inst_[idx], kBGNOff) < 0) {
- LogError("SetPlayoutMode::SetBGNMode", idx);
- err = -1;
- }
- }
- if (err == 0)
- playout_mode_ = mode;
- return err;
-}
-
-AudioPlayoutMode ACMNetEQ::playout_mode() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- return playout_mode_;
-}
-
-int32_t ACMNetEQ::NetworkStatistics(
- ACMNetworkStatistics* statistics) const {
- WebRtcNetEQ_NetworkStatistics stats;
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "NetworkStatistics: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_GetNetworkStatistics(inst_[0], &stats) == 0) {
- statistics->currentAccelerateRate = stats.currentAccelerateRate;
- statistics->currentBufferSize = stats.currentBufferSize;
- statistics->jitterPeaksFound = (stats.jitterPeaksFound > 0);
- statistics->currentDiscardRate = stats.currentDiscardRate;
- statistics->currentExpandRate = stats.currentExpandRate;
- statistics->currentPacketLossRate = stats.currentPacketLossRate;
- statistics->currentPreemptiveRate = stats.currentPreemptiveRate;
- statistics->preferredBufferSize = stats.preferredBufferSize;
- statistics->clockDriftPPM = stats.clockDriftPPM;
- statistics->addedSamples = stats.addedSamples;
- } else {
- LogError("getNetworkStatistics", 0);
- return -1;
- }
- const int kArrayLen = 100;
- int waiting_times[kArrayLen];
- int waiting_times_len = WebRtcNetEQ_GetRawFrameWaitingTimes(inst_[0],
- kArrayLen,
- waiting_times);
- if (waiting_times_len > 0) {
- std::vector<int> waiting_times_vec(waiting_times,
- waiting_times + waiting_times_len);
- std::sort(waiting_times_vec.begin(), waiting_times_vec.end());
- size_t size = waiting_times_vec.size();
- assert(size == static_cast<size_t>(waiting_times_len));
- if (size % 2 == 0) {
- statistics->medianWaitingTimeMs = (waiting_times_vec[size / 2 - 1] +
- waiting_times_vec[size / 2]) / 2;
- } else {
- statistics->medianWaitingTimeMs = waiting_times_vec[size / 2];
- }
- statistics->minWaitingTimeMs = waiting_times_vec.front();
- statistics->maxWaitingTimeMs = waiting_times_vec.back();
- double sum = 0;
- for (size_t i = 0; i < size; ++i) {
- sum += waiting_times_vec[i];
- }
- statistics->meanWaitingTimeMs = static_cast<int>(sum / size);
- } else if (waiting_times_len == 0) {
- statistics->meanWaitingTimeMs = -1;
- statistics->medianWaitingTimeMs = -1;
- statistics->minWaitingTimeMs = -1;
- statistics->maxWaitingTimeMs = -1;
- } else {
- LogError("getRawFrameWaitingTimes", 0);
- return -1;
- }
- return 0;
-}
-
-// Should only be called in AV-sync mode.
-int ACMNetEQ::RecIn(const WebRtcRTPHeader& rtp_info,
- uint32_t receive_timestamp) {
- assert(av_sync_);
-
- // Translate to NetEq structure.
- WebRtcNetEQ_RTPInfo neteq_rtpinfo;
- neteq_rtpinfo.payloadType = rtp_info.header.payloadType;
- neteq_rtpinfo.sequenceNumber = rtp_info.header.sequenceNumber;
- neteq_rtpinfo.timeStamp = rtp_info.header.timestamp;
- neteq_rtpinfo.SSRC = rtp_info.header.ssrc;
- neteq_rtpinfo.markerBit = rtp_info.header.markerBit;
-
- CriticalSectionScoped lock(neteq_crit_sect_);
-
- // Master should be initialized.
- assert(is_initialized_[0]);
-
- // Push into Master.
- int status = WebRtcNetEQ_RecInSyncRTP(inst_[0], &neteq_rtpinfo,
- receive_timestamp);
- if (status < 0) {
- LogError("RecInSyncRTP", 0);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn (sync): NetEq, error in pushing in Master");
- return -1;
- }
-
- // If the received stream is stereo, insert a sync payload into slave.
- if (rtp_info.type.Audio.channel == 2) {
- // Slave should be initialized.
- assert(is_initialized_[1]);
-
- // PUSH into Slave
- status = WebRtcNetEQ_RecInSyncRTP(inst_[1], &neteq_rtpinfo,
- receive_timestamp);
- if (status < 0) {
- LogError("RecInRTPStruct", 1);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn (sync): NetEq, error in pushing in Slave");
- return -1;
- }
- }
- return status;
-}
-
-int32_t ACMNetEQ::RecIn(const uint8_t* incoming_payload,
- const int32_t length_payload,
- const WebRtcRTPHeader& rtp_info,
- uint32_t receive_timestamp) {
- int16_t payload_length = static_cast<int16_t>(length_payload);
-
- // Translate to NetEq structure.
- WebRtcNetEQ_RTPInfo neteq_rtpinfo;
- neteq_rtpinfo.payloadType = rtp_info.header.payloadType;
- neteq_rtpinfo.sequenceNumber = rtp_info.header.sequenceNumber;
- neteq_rtpinfo.timeStamp = rtp_info.header.timestamp;
- neteq_rtpinfo.SSRC = rtp_info.header.ssrc;
- neteq_rtpinfo.markerBit = rtp_info.header.markerBit;
-
- CriticalSectionScoped lock(neteq_crit_sect_);
-
- int status;
- // In case of stereo payload, first half of the data should be pushed into
- // master, and the second half into slave.
- if (rtp_info.type.Audio.channel == 2) {
- payload_length = payload_length / 2;
- }
-
- // Check that master is initialized.
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn: NetEq is not initialized.");
- return -1;
- }
- // Push into Master.
- status = WebRtcNetEQ_RecInRTPStruct(inst_[0], &neteq_rtpinfo,
- incoming_payload, payload_length,
- receive_timestamp);
- if (status < 0) {
- LogError("RecInRTPStruct", 0);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn: NetEq, error in pushing in Master");
- return -1;
- }
-
- // If the received stream is stereo, insert second half of paket into slave.
- if (rtp_info.type.Audio.channel == 2) {
- if (!is_initialized_[1]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn: NetEq is not initialized.");
- return -1;
- }
- // Push into Slave.
- status = WebRtcNetEQ_RecInRTPStruct(inst_[1], &neteq_rtpinfo,
- &incoming_payload[payload_length],
- payload_length, receive_timestamp);
- if (status < 0) {
- LogError("RecInRTPStruct", 1);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecIn: NetEq, error in pushing in Slave");
- return -1;
- }
- }
-
- return 0;
-}
-
-int32_t ACMNetEQ::RecOut(AudioFrame& audio_frame) {
- enum WebRtcNetEQOutputType type;
- int16_t payload_len_sample;
- enum WebRtcNetEQOutputType type_master;
- enum WebRtcNetEQOutputType type_slave;
-
- int16_t payload_len_sample_slave;
-
- CriticalSectionScoped lockNetEq(neteq_crit_sect_);
-
- if (!received_stereo_) {
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq is not initialized.");
- return -1;
- }
- {
- WriteLockScoped lockCodec(*decode_lock_);
- if (WebRtcNetEQ_RecOut(inst_[0], &(audio_frame.data_[0]),
- &payload_len_sample) != 0) {
- LogError("RecOut", 0);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq, error in pulling out for mono case");
- // Check for errors that can be recovered from:
- // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
- int error_code = WebRtcNetEQ_GetErrorCode(inst_[0]);
- if (error_code != 2003) {
- // Cannot recover; return an error
- return -1;
- }
- }
- }
- WebRtcNetEQ_GetSpeechOutputType(inst_[0], &type);
- audio_frame.num_channels_ = 1;
- } else {
- if (!is_initialized_[0] || !is_initialized_[1]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq is not initialized.");
- return -1;
- }
- int16_t payload_master[480];
- int16_t payload_slave[480];
- {
- WriteLockScoped lockCodec(*decode_lock_);
- if (WebRtcNetEQ_RecOutMasterSlave(inst_[0], payload_master,
- &payload_len_sample, master_slave_info_,
- 1) != 0) {
- LogError("RecOutMasterSlave", 0);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq, error in pulling out for master");
-
- // Check for errors that can be recovered from:
- // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
- int error_code = WebRtcNetEQ_GetErrorCode(inst_[0]);
- if (error_code != 2003) {
- // Cannot recover; return an error
- return -1;
- }
- }
- if (WebRtcNetEQ_RecOutMasterSlave(inst_[1], payload_slave,
- &payload_len_sample_slave,
- master_slave_info_, 0) != 0) {
- LogError("RecOutMasterSlave", 1);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq, error in pulling out for slave");
-
- // Check for errors that can be recovered from:
- // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
- int error_code = WebRtcNetEQ_GetErrorCode(inst_[1]);
- if (error_code != 2003) {
- // Cannot recover; return an error
- return -1;
- }
- }
- }
-
- if (payload_len_sample != payload_len_sample_slave) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- "RecOut: mismatch between the lenght of the decoded audio "
- "by Master (%d samples) and Slave (%d samples).",
- payload_len_sample, payload_len_sample_slave);
- if (payload_len_sample > payload_len_sample_slave) {
- memset(&payload_slave[payload_len_sample_slave], 0,
- (payload_len_sample - payload_len_sample_slave) *
- sizeof(int16_t));
- }
- }
-
- for (int16_t n = 0; n < payload_len_sample; n++) {
- audio_frame.data_[n << 1] = payload_master[n];
- audio_frame.data_[(n << 1) + 1] = payload_slave[n];
- }
- audio_frame.num_channels_ = 2;
-
- WebRtcNetEQ_GetSpeechOutputType(inst_[0], &type_master);
- WebRtcNetEQ_GetSpeechOutputType(inst_[1], &type_slave);
- if ((type_master == kOutputNormal) || (type_slave == kOutputNormal)) {
- type = kOutputNormal;
- } else {
- type = type_master;
- }
- }
-
- audio_frame.samples_per_channel_ =
- static_cast<uint16_t>(payload_len_sample);
- // NetEq always returns 10 ms of audio.
- current_samp_freq_khz_ =
- static_cast<float>(audio_frame.samples_per_channel_) / 10.0f;
- audio_frame.sample_rate_hz_ = audio_frame.samples_per_channel_ * 100;
- if (vad_status_) {
- if (type == kOutputVADPassive) {
- audio_frame.vad_activity_ = AudioFrame::kVadPassive;
- audio_frame.speech_type_ = AudioFrame::kNormalSpeech;
- } else if (type == kOutputNormal) {
- audio_frame.vad_activity_ = AudioFrame::kVadActive;
- audio_frame.speech_type_ = AudioFrame::kNormalSpeech;
- } else if (type == kOutputPLC) {
- audio_frame.vad_activity_ = previous_audio_activity_;
- audio_frame.speech_type_ = AudioFrame::kPLC;
- } else if (type == kOutputCNG) {
- audio_frame.vad_activity_ = AudioFrame::kVadPassive;
- audio_frame.speech_type_ = AudioFrame::kCNG;
- } else {
- audio_frame.vad_activity_ = AudioFrame::kVadPassive;
- audio_frame.speech_type_ = AudioFrame::kPLCCNG;
- }
- } else {
- // Always return kVadUnknown when receive VAD is inactive
- audio_frame.vad_activity_ = AudioFrame::kVadUnknown;
- if (type == kOutputNormal) {
- audio_frame.speech_type_ = AudioFrame::kNormalSpeech;
- } else if (type == kOutputPLC) {
- audio_frame.speech_type_ = AudioFrame::kPLC;
- } else if (type == kOutputPLCtoCNG) {
- audio_frame.speech_type_ = AudioFrame::kPLCCNG;
- } else if (type == kOutputCNG) {
- audio_frame.speech_type_ = AudioFrame::kCNG;
- } else {
- // type is kOutputVADPassive which
- // we don't expect to get if vad_status_ is false
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- "RecOut: NetEq returned kVadPassive while vad_status_ is "
- "false.");
- audio_frame.vad_activity_ = AudioFrame::kVadUnknown;
- audio_frame.speech_type_ = AudioFrame::kNormalSpeech;
- }
- }
- previous_audio_activity_ = audio_frame.vad_activity_;
-
- WebRtcNetEQ_ProcessingActivity processing_stats;
- WebRtcNetEQ_GetProcessingActivity(inst_[0], &processing_stats);
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "ACM::RecOut accelerate_bgn=%d accelerate_normal=%d"
- " expand_bgn=%d expand_normal=%d"
- " preemptive_bgn=%d preemptive_normal=%d"
- " merge_bgn=%d merge_normal=%d",
- processing_stats.accelerate_bgn_samples,
- processing_stats.accelerate_normal_samples,
- processing_stats.expand_bgn_sampels,
- processing_stats.expand_normal_samples,
- processing_stats.preemptive_expand_bgn_samples,
- processing_stats.preemptive_expand_normal_samples,
- processing_stats.merge_expand_bgn_samples,
- processing_stats.merge_expand_normal_samples);
- return 0;
-}
-
-// When ACMGenericCodec has set the codec specific parameters in codec_def
-// it calls AddCodec() to add the new codec to the NetEQ database.
-int32_t ACMNetEQ::AddCodec(WebRtcNetEQ_CodecDef* codec_def,
- bool to_master) {
- if (codec_def == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ACMNetEQ::AddCodec: error, codec_def is NULL");
- return -1;
- }
- CriticalSectionScoped lock(neteq_crit_sect_);
-
- int16_t idx;
- if (to_master) {
- idx = 0;
- } else {
- idx = 1;
- }
-
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ACMNetEQ::AddCodec: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_CodecDbAdd(inst_[idx], codec_def) < 0) {
- LogError("CodecDB_Add", idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ACMNetEQ::AddCodec: NetEq, error in adding codec");
- return -1;
- } else {
- return 0;
- }
-}
-
-// Creates a Word16 RTP packet out of a Word8 payload and an rtp info struct.
-// Must be byte order safe.
-void ACMNetEQ::RTPPack(int16_t* rtp_packet, const int8_t* payload,
- const int32_t payload_length_bytes,
- const WebRtcRTPHeader& rtp_info) {
- int32_t idx = 0;
- WEBRTC_SPL_SET_BYTE(rtp_packet, (int8_t) 0x80, idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet, rtp_info.header.payloadType, idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.sequenceNumber), 1),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.sequenceNumber), 0),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.timestamp), 3),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.timestamp), 2),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.timestamp), 1),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.timestamp), 0),
- idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet,
- WEBRTC_SPL_GET_BYTE(&(rtp_info.header.ssrc), 3), idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet, WEBRTC_SPL_GET_BYTE(&(rtp_info.header.ssrc),
- 2), idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet, WEBRTC_SPL_GET_BYTE(&(rtp_info.header.ssrc),
- 1), idx);
- idx++;
- WEBRTC_SPL_SET_BYTE(rtp_packet, WEBRTC_SPL_GET_BYTE(&(rtp_info.header.ssrc),
- 0), idx);
- idx++;
- for (int16_t i = 0; i < payload_length_bytes; i++) {
- WEBRTC_SPL_SET_BYTE(rtp_packet, payload[i], idx);
- idx++;
- }
- if (payload_length_bytes & 1) {
- // Our 16 bits buffer is one byte too large, set that
- // last byte to zero.
- WEBRTC_SPL_SET_BYTE(rtp_packet, 0x0, idx);
- }
-}
-
-int16_t ACMNetEQ::EnableVAD() {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (vad_status_) {
- return 0;
- }
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetVADStatus: NetEq is not initialized.");
- return -1;
- }
- // VAD was off and we have to turn it on
- if (EnableVADByIdxSafe(idx) < 0) {
- return -1;
- }
-
- // Set previous VAD status to PASSIVE
- previous_audio_activity_ = AudioFrame::kVadPassive;
- }
- vad_status_ = true;
- return 0;
-}
-
-ACMVADMode ACMNetEQ::vad_mode() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- return vad_mode_;
-}
-
-int16_t ACMNetEQ::SetVADMode(const ACMVADMode mode) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if ((mode < VADNormal) || (mode > VADVeryAggr)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetVADMode: NetEq error: could not set VAD mode, mode is not "
- "supported");
- return -1;
- } else {
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetVADMode: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_SetVADMode(inst_[idx], mode) < 0) {
- LogError("SetVADmode", idx);
- return -1;
- }
- }
- vad_mode_ = mode;
- return 0;
- }
-}
-
-int32_t ACMNetEQ::FlushBuffers() {
- CriticalSectionScoped lock(neteq_crit_sect_);
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "FlushBuffers: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_FlushBuffers(inst_[idx]) < 0) {
- LogError("FlushBuffers", idx);
- return -1;
- }
- }
- return 0;
-}
-
-int16_t ACMNetEQ::RemoveCodec(WebRtcNetEQDecoder codec_idx,
- bool is_stereo) {
- // sanity check
- if ((codec_idx <= kDecoderReservedStart) ||
- (codec_idx >= kDecoderReservedEnd)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RemoveCodec: NetEq error: could not Remove Codec, codec "
- "index out of range");
- return -1;
- }
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RemoveCodec: NetEq is not initialized.");
- return -1;
- }
-
- if (WebRtcNetEQ_CodecDbRemove(inst_[0], codec_idx) < 0) {
- LogError("CodecDB_Remove", 0);
- return -1;
- }
-
- if (is_stereo) {
- if (WebRtcNetEQ_CodecDbRemove(inst_[1], codec_idx) < 0) {
- LogError("CodecDB_Remove", 1);
- return -1;
- }
- }
-
- return 0;
-}
-
-int16_t ACMNetEQ::SetBackgroundNoiseMode(
- const ACMBackgroundNoiseMode mode) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
- if (!is_initialized_[idx]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetBackgroundNoiseMode: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_SetBGNMode(inst_[idx], (WebRtcNetEQBGNMode) mode) < 0) {
- LogError("SetBGNMode", idx);
- return -1;
- }
- }
- return 0;
-}
-
-int16_t ACMNetEQ::BackgroundNoiseMode(ACMBackgroundNoiseMode& mode) {
- WebRtcNetEQBGNMode my_mode;
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (!is_initialized_[0]) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "BackgroundNoiseMode: NetEq is not initialized.");
- return -1;
- }
- if (WebRtcNetEQ_GetBGNMode(inst_[0], &my_mode) < 0) {
- LogError("WebRtcNetEQ_GetBGNMode", 0);
- return -1;
- } else {
- mode = (ACMBackgroundNoiseMode) my_mode;
- }
- return 0;
-}
-
-void ACMNetEQ::set_id(int32_t id) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- id_ = id;
-}
-
-void ACMNetEQ::LogError(const char* neteq_func_name,
- const int16_t idx) const {
- char error_name[NETEQ_ERR_MSG_LEN_BYTE];
- char my_func_name[50];
- int neteq_error_code = WebRtcNetEQ_GetErrorCode(inst_[idx]);
- WebRtcNetEQ_GetErrorName(neteq_error_code, error_name,
- NETEQ_ERR_MSG_LEN_BYTE - 1);
- strncpy(my_func_name, neteq_func_name, 49);
- error_name[NETEQ_ERR_MSG_LEN_BYTE - 1] = '\0';
- my_func_name[49] = '\0';
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "NetEq-%d Error in function %s, error-code: %d, error-string: "
- " %s", idx, my_func_name, neteq_error_code, error_name);
-}
-
-int32_t ACMNetEQ::PlayoutTimestamp(uint32_t& timestamp) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (WebRtcNetEQ_GetSpeechTimeStamp(inst_[0], &timestamp) < 0) {
- LogError("GetSpeechTimeStamp", 0);
- return -1;
- } else {
- return 0;
- }
-}
-
-void ACMNetEQ::RemoveSlaves() {
- CriticalSectionScoped lock(neteq_crit_sect_);
- RemoveSlavesSafe();
-}
-
-void ACMNetEQ::RemoveSlavesSafe() {
- for (int i = 1; i < num_slaves_ + 1; i++) {
- RemoveNetEQSafe(i);
- }
-
- if (master_slave_info_ != NULL) {
- free(master_slave_info_);
- master_slave_info_ = NULL;
- }
- num_slaves_ = 0;
-}
-
-void ACMNetEQ::RemoveNetEQSafe(int index) {
- if (inst_mem_[index] != NULL) {
- free(inst_mem_[index]);
- inst_mem_[index] = NULL;
- inst_[index] = NULL;
- }
- if (neteq_packet_buffer_[index] != NULL) {
- free(neteq_packet_buffer_[index]);
- neteq_packet_buffer_[index] = NULL;
- }
- if (ptr_vadinst_[index] != NULL) {
- WebRtcVad_Free(ptr_vadinst_[index]);
- ptr_vadinst_[index] = NULL;
- }
-}
-
-int16_t ACMNetEQ::AddSlave(const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- const int16_t slave_idx = 1;
- if (num_slaves_ < 1) {
- // initialize the receiver, this also sets up VAD.
- if (InitByIdxSafe(slave_idx) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not Initialize");
- return -1;
- }
-
- // Allocate buffer.
- if (AllocatePacketBufferByIdxSafe(used_codecs, num_codecs,
- slave_idx) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not Allocate Packet "
- "Buffer");
- return -1;
- }
-
- if (master_slave_info_ != NULL) {
- free(master_slave_info_);
- master_slave_info_ = NULL;
- }
- int ms_info_size = WebRtcNetEQ_GetMasterSlaveInfoSize();
- master_slave_info_ = malloc(ms_info_size);
-
- if (master_slave_info_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not Allocate memory for "
- "Master-Slave Info");
- return -1;
- }
-
- // We accept this as initialized NetEQ, the rest is to synchronize
- // Slave with Master.
- num_slaves_ = 1;
- is_initialized_[slave_idx] = true;
-
- // Set AVT
- if (WebRtcNetEQ_SetAVTPlayout(inst_[slave_idx],
- (avt_playout_) ? 1 : 0) < 0) {
- LogError("SetAVTPlayout", slave_idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not set AVT playout.");
- return -1;
- }
-
- // Set Background Noise
- WebRtcNetEQBGNMode current_mode;
- if (WebRtcNetEQ_GetBGNMode(inst_[0], &current_mode) < 0) {
- LogError("GetBGNMode", 0);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AAddSlave: AddSlave Failed, Could not Get BGN form "
- "Master.");
- return -1;
- }
-
- if (WebRtcNetEQ_SetBGNMode(inst_[slave_idx],
- (WebRtcNetEQBGNMode) current_mode) < 0) {
- LogError("SetBGNMode", slave_idx);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not set BGN mode.");
- return -1;
- }
-
- enum WebRtcNetEQPlayoutMode playout_mode = kPlayoutOff;
- switch (playout_mode_) {
- case voice:
- playout_mode = kPlayoutOn;
- break;
- case fax:
- playout_mode = kPlayoutFax;
- break;
- case streaming:
- playout_mode = kPlayoutStreaming;
- break;
- case off:
- playout_mode = kPlayoutOff;
- break;
- }
- if (WebRtcNetEQ_SetPlayoutMode(inst_[slave_idx], playout_mode) < 0) {
- LogError("SetPlayoutMode", 1);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "AddSlave: AddSlave Failed, Could not Set Playout Mode.");
- return -1;
- }
-
- // Set AV-sync for the slave.
- WebRtcNetEQ_EnableAVSync(inst_[slave_idx], av_sync_ ? 1 : 0);
-
- // Set minimum delay.
- if (minimum_delay_ms_ > 0)
- WebRtcNetEQ_SetMinimumDelay(inst_[slave_idx], minimum_delay_ms_);
-
- // Set maximum delay.
- if (maximum_delay_ms_ > 0)
- WebRtcNetEQ_SetMaximumDelay(inst_[slave_idx], maximum_delay_ms_);
- }
-
- return 0;
-}
-
-void ACMNetEQ::set_received_stereo(bool received_stereo) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- received_stereo_ = received_stereo;
-}
-
-uint8_t ACMNetEQ::num_slaves() {
- CriticalSectionScoped lock(neteq_crit_sect_);
- return num_slaves_;
-}
-
-void ACMNetEQ::EnableAVSync(bool enable) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- av_sync_ = enable;
- for (int i = 0; i < num_slaves_ + 1; ++i) {
- assert(is_initialized_[i]);
- WebRtcNetEQ_EnableAVSync(inst_[i], enable ? 1 : 0);
- }
-}
-
-int ACMNetEQ::SetMinimumDelay(int minimum_delay_ms) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- for (int i = 0; i < num_slaves_ + 1; ++i) {
- assert(is_initialized_[i]);
- if (WebRtcNetEQ_SetMinimumDelay(inst_[i], minimum_delay_ms) < 0)
- return -1;
- }
- minimum_delay_ms_ = minimum_delay_ms;
- return 0;
-}
-
-int ACMNetEQ::SetMaximumDelay(int maximum_delay_ms) {
- CriticalSectionScoped lock(neteq_crit_sect_);
- for (int i = 0; i < num_slaves_ + 1; ++i) {
- assert(is_initialized_[i]);
- if (WebRtcNetEQ_SetMaximumDelay(inst_[i], maximum_delay_ms) < 0)
- return -1;
- }
- maximum_delay_ms_ = maximum_delay_ms;
- return 0;
-}
-
-int ACMNetEQ::LeastRequiredDelayMs() const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- assert(is_initialized_[0]);
-
- // Sufficient to query the master.
- return WebRtcNetEQ_GetRequiredDelayMs(inst_[0]);
-}
-
-bool ACMNetEQ::DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) const {
- CriticalSectionScoped lock(neteq_crit_sect_);
- if (WebRtcNetEQ_DecodedRtpInfo(inst_[0], sequence_number, timestamp) < 0)
- return false;
- return true;
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.h
deleted file mode 100644
index e52ddc79571..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq.h
+++ /dev/null
@@ -1,399 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
-
-#include "webrtc/common_audio/vad/include/webrtc_vad.h"
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class CriticalSectionWrapper;
-class RWLockWrapper;
-struct CodecInst;
-
-namespace acm1 {
-
-#define MAX_NUM_SLAVE_NETEQ 1
-
-class ACMNetEQ {
- public:
- enum JitterBuffer {
- kMasterJb = 0,
- kSlaveJb = 1
- };
-
- // Constructor of the class
- ACMNetEQ();
-
- // Destructor of the class.
- ~ACMNetEQ();
-
- //
- // Init()
- // Allocates memory for NetEQ and VAD and initializes them.
- //
- // Return value : 0 if ok.
- // -1 if NetEQ or VAD returned an error or
- // if out of memory.
- //
- int32_t Init();
-
- //
- // RecIn()
- // Gives the payload to NetEQ.
- //
- // Input:
- // - incoming_payload : Incoming audio payload.
- // - length_payload : Length of incoming audio payload.
- // - rtp_info : RTP header for the incoming payload containing
- // information about payload type, sequence number,
- // timestamp, SSRC and marker bit.
- // - receive_timestamp : received timestamp.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t RecIn(const uint8_t* incoming_payload,
- const int32_t length_payload,
- const WebRtcRTPHeader& rtp_info,
- uint32_t receive_timestamp);
-
- //
- // RecIn()
- // Insert a sync payload to NetEq. Should only be called if |av_sync_| is
- // enabled;
- //
- // Input:
- // - rtp_info : RTP header for the incoming payload containing
- // information about payload type, sequence number,
- // timestamp, SSRC and marker bit.
- // - receive_timestamp : received timestamp.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int RecIn(const WebRtcRTPHeader& rtp_info, uint32_t receive_timestamp);
-
- //
- // RecOut()
- // Asks NetEQ for 10 ms of decoded audio.
- //
- // Input:
- // -audio_frame : an audio frame were output data and
- // associated parameters are written to.
- //
- // Return value : 0 if ok.
- // -1 if NetEQ returned an error.
- //
- int32_t RecOut(AudioFrame& audio_frame);
-
- //
- // AddCodec()
- // Adds a new codec to the NetEQ codec database.
- //
- // Input:
- // - codec_def : The codec to be added.
- // - to_master : true if the codec has to be added to Master
- // NetEq, otherwise will be added to the Slave
- // NetEQ.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t AddCodec(WebRtcNetEQ_CodecDef *codec_def,
- bool to_master = true);
-
- //
- // AllocatePacketBuffer()
- // Allocates the NetEQ packet buffer.
- //
- // Input:
- // - used_codecs : An array of the codecs to be used by NetEQ.
- // - num_codecs : Number of codecs in used_codecs.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t AllocatePacketBuffer(const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs);
-
- //
- // SetAVTPlayout()
- // Enable/disable playout of AVT payloads.
- //
- // Input:
- // - enable : Enable if true, disable if false.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t SetAVTPlayout(const bool enable);
-
- //
- // AVTPlayout()
- // Get the current AVT playout state.
- //
- // Return value : True if AVT playout is enabled.
- // False if AVT playout is disabled.
- //
- bool avt_playout() const;
-
- //
- // CurrentSampFreqHz()
- // Get the current sampling frequency in Hz.
- //
- // Return value : Sampling frequency in Hz.
- //
- int32_t CurrentSampFreqHz() const;
-
- //
- // SetPlayoutMode()
- // Sets the playout mode to voice or fax.
- //
- // Input:
- // - mode : The playout mode to be used, voice,
- // fax, or streaming.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t SetPlayoutMode(const AudioPlayoutMode mode);
-
- //
- // PlayoutMode()
- // Get the current playout mode.
- //
- // Return value : The current playout mode.
- //
- AudioPlayoutMode playout_mode() const;
-
- //
- // NetworkStatistics()
- // Get the current network statistics from NetEQ.
- //
- // Output:
- // - statistics : The current network statistics.
- //
- // Return value : 0 if ok.
- // <0 if NetEQ returned an error.
- //
- int32_t NetworkStatistics(ACMNetworkStatistics* statistics) const;
-
- //
- // VADMode()
- // Get the current VAD Mode.
- //
- // Return value : The current VAD mode.
- //
- ACMVADMode vad_mode() const;
-
- //
- // SetVADMode()
- // Set the VAD mode.
- //
- // Input:
- // - mode : The new VAD mode.
- //
- // Return value : 0 if ok.
- // -1 if an error occurred.
- //
- int16_t SetVADMode(const ACMVADMode mode);
-
- //
- // DecodeLock()
- // Get the decode lock used to protect decoder instances while decoding.
- //
- // Return value : Pointer to the decode lock.
- //
- RWLockWrapper* DecodeLock() const {
- return decode_lock_;
- }
-
- //
- // FlushBuffers()
- // Flushes the NetEQ packet and speech buffers.
- //
- // Return value : 0 if ok.
- // -1 if NetEQ returned an error.
- //
- int32_t FlushBuffers();
-
- //
- // RemoveCodec()
- // Removes a codec from the NetEQ codec database.
- //
- // Input:
- // - codec_idx : Codec to be removed.
- //
- // Return value : 0 if ok.
- // -1 if an error occurred.
- //
- int16_t RemoveCodec(WebRtcNetEQDecoder codec_idx,
- bool is_stereo = false);
-
- //
- // SetBackgroundNoiseMode()
- // Set the mode of the background noise.
- //
- // Input:
- // - mode : an enumerator specifying the mode of the
- // background noise.
- //
- // Return value : 0 if succeeded,
- // -1 if failed to set the mode.
- //
- int16_t SetBackgroundNoiseMode(const ACMBackgroundNoiseMode mode);
-
- //
- // BackgroundNoiseMode()
- // return the mode of the background noise.
- //
- // Return value : The mode of background noise.
- //
- int16_t BackgroundNoiseMode(ACMBackgroundNoiseMode& mode);
-
- void set_id(int32_t id);
-
- int32_t PlayoutTimestamp(uint32_t& timestamp);
-
- void set_received_stereo(bool received_stereo);
-
- uint8_t num_slaves();
-
- // Delete all slaves.
- void RemoveSlaves();
-
- int16_t AddSlave(const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs);
-
- void BufferSpec(int& num_packets, int& size_bytes, int& overhead_bytes) {
- num_packets = min_of_max_num_packets_;
- size_bytes = min_of_buffer_size_bytes_;
- overhead_bytes = per_packet_overhead_bytes_;
- }
-
- //
- // Set AV-sync mode.
- //
- void EnableAVSync(bool enable);
-
- //
- // Get sequence number and timestamp of the last decoded RTP.
- //
- bool DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) const;
-
- //
- // Set a minimum delay in NetEq. Unless channel condition dictates a longer
- // delay, the given delay is maintained by NetEq.
- //
- int SetMinimumDelay(int minimum_delay_ms);
-
- //
- // Set a maximum delay in NetEq.
- //
- int SetMaximumDelay(int maximum_delay_ms);
-
- //
- // The shortest latency, in milliseconds, required by jitter buffer. This
- // is computed based on inter-arrival times and playout mode of NetEq. The
- // actual delay is the maximum of least-required-delay and the minimum-delay
- // specified by SetMinumumPlayoutDelay() API.
- //
- int LeastRequiredDelayMs() const ;
-
- private:
- //
- // RTPPack()
- // Creates a Word16 RTP packet out of the payload data in Word16 and
- // a WebRtcRTPHeader.
- //
- // Input:
- // - payload : Payload to be packetized.
- // - payload_length_bytes : Length of the payload in bytes.
- // - rtp_info : RTP header structure.
- //
- // Output:
- // - rtp_packet : The RTP packet.
- //
- static void RTPPack(int16_t* rtp_packet, const int8_t* payload,
- const int32_t payload_length_bytes,
- const WebRtcRTPHeader& rtp_info);
-
- void LogError(const char* neteq_func_name, const int16_t idx) const;
-
- int16_t InitByIdxSafe(const int16_t idx);
-
- //
- // EnableVAD()
- // Enable VAD.
- //
- // Return value : 0 if ok.
- // -1 if an error occurred.
- //
- int16_t EnableVAD();
-
- int16_t EnableVADByIdxSafe(const int16_t idx);
-
- int16_t AllocatePacketBufferByIdxSafe(
- const WebRtcNetEQDecoder* used_codecs,
- int16_t num_codecs,
- const int16_t idx);
-
- // Delete the NetEQ corresponding to |index|.
- void RemoveNetEQSafe(int index);
-
- void RemoveSlavesSafe();
-
- void* inst_[MAX_NUM_SLAVE_NETEQ + 1];
- void* inst_mem_[MAX_NUM_SLAVE_NETEQ + 1];
-
- int16_t* neteq_packet_buffer_[MAX_NUM_SLAVE_NETEQ + 1];
-
- int32_t id_;
- float current_samp_freq_khz_;
- bool avt_playout_;
- AudioPlayoutMode playout_mode_;
- CriticalSectionWrapper* neteq_crit_sect_;
-
- WebRtcVadInst* ptr_vadinst_[MAX_NUM_SLAVE_NETEQ + 1];
-
- bool vad_status_;
- ACMVADMode vad_mode_;
- RWLockWrapper* decode_lock_;
- bool is_initialized_[MAX_NUM_SLAVE_NETEQ + 1];
- uint8_t num_slaves_;
- bool received_stereo_;
- void* master_slave_info_;
- AudioFrame::VADActivity previous_audio_activity_;
-
- CriticalSectionWrapper* callback_crit_sect_;
- // Minimum of "max number of packets," among all NetEq instances.
- int min_of_max_num_packets_;
- // Minimum of buffer-size among all NetEq instances.
- int min_of_buffer_size_bytes_;
- int per_packet_overhead_bytes_;
-
- // Keep track of AV-sync. Just used to set the slave when a slave is added.
- bool av_sync_;
-
- int minimum_delay_ms_;
- int maximum_delay_ms_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc
deleted file mode 100644
index 8b973ba230f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// This file contains unit tests for ACM's NetEQ wrapper (class ACMNetEQ).
-
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-
-#include <stdlib.h>
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class AcmNetEqTest : public ::testing::Test {
- protected:
- static const size_t kMaxPayloadLen = 5760; // 60 ms, 48 kHz, 16 bit samples.
- static const int kPcm16WbPayloadType = 94;
- AcmNetEqTest() {}
- virtual void SetUp();
- virtual void TearDown() {}
-
- void InsertZeroPacket(uint16_t sequence_number,
- uint32_t timestamp,
- uint8_t payload_type,
- uint32_t ssrc,
- bool marker_bit,
- size_t len_payload_bytes);
- void PullData(int expected_num_samples);
-
- ACMNetEQ neteq_;
-};
-
-void AcmNetEqTest::SetUp() {
- ASSERT_EQ(0, neteq_.Init());
- ASSERT_EQ(0, neteq_.AllocatePacketBuffer(ACMCodecDB::NetEQDecoders(),
- ACMCodecDB::kNumCodecs));
- WebRtcNetEQ_CodecDef codec_def;
- SET_CODEC_PAR(codec_def, kDecoderPCM16Bwb, kPcm16WbPayloadType, NULL, 16000);
- SET_PCM16B_WB_FUNCTIONS(codec_def);
- ASSERT_EQ(0, neteq_.AddCodec(&codec_def, true));
-}
-
-void AcmNetEqTest::InsertZeroPacket(uint16_t sequence_number,
- uint32_t timestamp,
- uint8_t payload_type,
- uint32_t ssrc,
- bool marker_bit,
- size_t len_payload_bytes) {
- ASSERT_TRUE(len_payload_bytes <= kMaxPayloadLen);
- uint16_t payload[kMaxPayloadLen] = {0};
- WebRtcRTPHeader rtp_header;
- rtp_header.header.sequenceNumber = sequence_number;
- rtp_header.header.timestamp = timestamp;
- rtp_header.header.ssrc = ssrc;
- rtp_header.header.payloadType = payload_type;
- rtp_header.header.markerBit = marker_bit;
- rtp_header.type.Audio.channel = 1;
- // Receive timestamp can be set to send timestamp in this test.
- ASSERT_EQ(0, neteq_.RecIn(reinterpret_cast<uint8_t*>(payload),
- len_payload_bytes, rtp_header, timestamp));
-}
-
-void AcmNetEqTest::PullData(int expected_num_samples) {
- AudioFrame out_frame;
- ASSERT_EQ(0, neteq_.RecOut(out_frame));
- ASSERT_EQ(expected_num_samples, out_frame.samples_per_channel_);
-}
-
-TEST_F(AcmNetEqTest, NetworkStatistics) {
- // Use fax mode to avoid time-scaling. This is to simplify the testing of
- // packet waiting times in the packet buffer.
- neteq_.SetPlayoutMode(fax);
- // Insert 31 dummy packets at once. Each packet contains 10 ms 16 kHz audio.
- int num_frames = 30;
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- int i, j;
- for (i = 0; i < num_frames; ++i) {
- InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
- kPayloadBytes);
- }
- // Pull out data once.
- PullData(kSamples);
- // Insert one more packet (to produce different mean and median).
- i = num_frames;
- InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
- kPayloadBytes);
- // Pull out all data.
- for (j = 1; j < num_frames + 1; ++j) {
- PullData(kSamples);
- }
-
- ACMNetworkStatistics stats;
- ASSERT_EQ(0, neteq_.NetworkStatistics(&stats));
- EXPECT_EQ(0, stats.currentBufferSize);
- EXPECT_EQ(0, stats.preferredBufferSize);
- EXPECT_FALSE(stats.jitterPeaksFound);
- EXPECT_EQ(0, stats.currentPacketLossRate);
- EXPECT_EQ(0, stats.currentDiscardRate);
- EXPECT_EQ(0, stats.currentExpandRate);
- EXPECT_EQ(0, stats.currentPreemptiveRate);
- EXPECT_EQ(0, stats.currentAccelerateRate);
- EXPECT_EQ(-916, stats.clockDriftPPM); // Initial value is slightly off.
- EXPECT_EQ(300, stats.maxWaitingTimeMs);
- EXPECT_EQ(10, stats.minWaitingTimeMs);
- EXPECT_EQ(159, stats.meanWaitingTimeMs);
- EXPECT_EQ(160, stats.medianWaitingTimeMs);
-}
-
-TEST_F(AcmNetEqTest, TestZeroLengthWaitingTimesVector) {
- // Insert one packet.
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- int i = 0;
- InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
- kPayloadBytes);
- // Do not pull out any data.
-
- ACMNetworkStatistics stats;
- ASSERT_EQ(0, neteq_.NetworkStatistics(&stats));
- EXPECT_EQ(0, stats.currentBufferSize);
- EXPECT_EQ(0, stats.preferredBufferSize);
- EXPECT_FALSE(stats.jitterPeaksFound);
- EXPECT_EQ(0, stats.currentPacketLossRate);
- EXPECT_EQ(0, stats.currentDiscardRate);
- EXPECT_EQ(0, stats.currentExpandRate);
- EXPECT_EQ(0, stats.currentPreemptiveRate);
- EXPECT_EQ(0, stats.currentAccelerateRate);
- EXPECT_EQ(-916, stats.clockDriftPPM); // Initial value is slightly off.
- EXPECT_EQ(-1, stats.minWaitingTimeMs);
- EXPECT_EQ(-1, stats.maxWaitingTimeMs);
- EXPECT_EQ(-1, stats.meanWaitingTimeMs);
- EXPECT_EQ(-1, stats.medianWaitingTimeMs);
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.cc
deleted file mode 100644
index 413f3715fc6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.cc
+++ /dev/null
@@ -1,319 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_opus.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_OPUS
-#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_OPUS
-
-ACMOpus::ACMOpus(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- sample_freq_(0),
- bitrate_(0),
- channels_(1) {
- return;
-}
-
-ACMOpus::~ACMOpus() {
- return;
-}
-
-int16_t ACMOpus::InternalEncode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMOpus::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMOpus::InternalInitEncoder(WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMOpus::InternalInitDecoder(WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMOpus::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMOpus::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMOpus::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMOpus::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMOpus::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMOpus::DestructDecoderSafe() {
- return;
-}
-
-void ACMOpus::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-int16_t ACMOpus::SetBitRateSafe(const int32_t /*rate*/) {
- return -1;
-}
-
-bool ACMOpus::IsTrueStereoCodec() {
- return true;
-}
-
-void ACMOpus::SplitStereoPacket(uint8_t* /*payload*/,
- int32_t* /*payload_length*/) {}
-
-#else //===================== Actual Implementation =======================
-
-ACMOpus::ACMOpus(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- sample_freq_(32000), // Default sampling frequency.
- bitrate_(20000), // Default bit-rate.
- channels_(1) { // Default mono
- codec_id_ = codec_id;
-
- // Opus has internal DTX, but we don't use it for now.
- has_internal_dtx_ = false;
-
- if (codec_id_ != ACMCodecDB::kOpus) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Wrong codec id for Opus.");
- sample_freq_ = -1;
- bitrate_ = -1;
- }
- return;
-}
-
-ACMOpus::~ACMOpus() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcOpus_EncoderFree(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcOpus_DecoderFree(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMOpus::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- // Call Encoder.
- *bitstream_len_byte = WebRtcOpus_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_],
- frame_len_smpl_,
- MAX_PAYLOAD_SIZE_BYTE, bitstream);
- // Check for error reported from encoder.
- if (*bitstream_len_byte < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "InternalEncode: Encode error for Opus");
- *bitstream_len_byte = 0;
- return -1;
- }
-
- // Increment the read index. This tells the caller how far
- // we have gone forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_ * channels_;
-
- return *bitstream_len_byte;
-}
-
-int16_t ACMOpus::DecodeSafe(uint8_t* bitstream, int16_t bitstream_len_byte,
- int16_t* audio, int16_t* audio_samples,
- int8_t* speech_type) {
- return 0;
-}
-
-int16_t ACMOpus::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- int16_t ret;
- if (encoder_inst_ptr_ != NULL) {
- WebRtcOpus_EncoderFree(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- ret = WebRtcOpus_EncoderCreate(&encoder_inst_ptr_,
- codec_params->codec_inst.channels);
- // Store number of channels.
- channels_ = codec_params->codec_inst.channels;
-
- if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Encoder creation failed for Opus");
- return ret;
- }
- ret = WebRtcOpus_SetBitRate(encoder_inst_ptr_,
- codec_params->codec_inst.rate);
- if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Setting initial bitrate failed for Opus");
- return ret;
- }
-
- // Store bitrate.
- bitrate_ = codec_params->codec_inst.rate;
-
- return 0;
-}
-
-int16_t ACMOpus::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
- if (decoder_inst_ptr_ == NULL) {
- if (WebRtcOpus_DecoderCreate(&decoder_inst_ptr_,
- codec_params->codec_inst.channels) < 0) {
- return -1;
- }
- }
-
- // Number of channels in decoder should match the number in |codec_params|.
- assert(codec_params->codec_inst.channels ==
- WebRtcOpus_DecoderChannels(decoder_inst_ptr_));
-
- if (WebRtcOpus_DecoderInit(decoder_inst_ptr_) < 0) {
- return -1;
- }
- if (WebRtcOpus_DecoderInitSlave(decoder_inst_ptr_) < 0) {
- return -1;
- }
- return 0;
-}
-
-int32_t ACMOpus::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "CodeDef: Decoder uninitialized for Opus");
- return -1;
- }
-
- // Fill up the structure by calling "SET_CODEC_PAR" & "SET_OPUS_FUNCTION."
- // Then call NetEQ to add the codec to its database.
- // TODO(tlegrand): Decoder is registered in NetEQ as a 32 kHz decoder, which
- // is true until we have a full 48 kHz system, and remove the downsampling
- // in the Opus decoder wrapper.
- SET_CODEC_PAR(codec_def, kDecoderOpus, codec_inst.pltype,
- decoder_inst_ptr_, 32000);
-
- // If this is the master of NetEQ, regular decoder will be added, otherwise
- // the slave decoder will be used.
- if (is_master_) {
- SET_OPUS_FUNCTIONS(codec_def);
- } else {
- SET_OPUSSLAVE_FUNCTIONS(codec_def);
- }
-
- return 0;
-}
-
-ACMGenericCodec* ACMOpus::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMOpus::InternalCreateEncoder() {
- // Real encoder will be created in InternalInitEncoder.
- return 0;
-}
-
-void ACMOpus::DestructEncoderSafe() {
- if (encoder_inst_ptr_) {
- WebRtcOpus_EncoderFree(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
-}
-
-int16_t ACMOpus::InternalCreateDecoder() {
- // Real decoder will be created in InternalInitDecoder
- return 0;
-}
-
-void ACMOpus::DestructDecoderSafe() {
- decoder_initialized_ = false;
- if (decoder_inst_ptr_) {
- WebRtcOpus_DecoderFree(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
-}
-
-void ACMOpus::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcOpus_EncoderFree(reinterpret_cast<OpusEncInst*>(ptr_inst));
- }
- return;
-}
-
-int16_t ACMOpus::SetBitRateSafe(const int32_t rate) {
- if (rate < 6000 || rate > 510000) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "SetBitRateSafe: Invalid rate Opus");
- return -1;
- }
-
- bitrate_ = rate;
-
- // Ask the encoder for the new rate.
- if (WebRtcOpus_SetBitRate(encoder_inst_ptr_, bitrate_) >= 0) {
- encoder_params_.codec_inst.rate = bitrate_;
- return 0;
- }
-
- return -1;
-}
-
-bool ACMOpus::IsTrueStereoCodec() {
- return true;
-}
-
-// Copy the stereo packet so that NetEq will insert into both master and slave.
-void ACMOpus::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
- // Duplicate the payload.
- memcpy(&payload[*payload_length], &payload[0],
- sizeof(uint8_t) * (*payload_length));
- // Double the size of the packet.
- *payload_length *= 2;
-}
-
-#endif // WEBRTC_CODEC_OPUS
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.h
deleted file mode 100644
index 1e586ff41ad..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_opus.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
-
-#include "webrtc/common_audio/resampler/include/resampler.h"
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-struct WebRtcOpusEncInst;
-struct WebRtcOpusDecInst;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMOpus : public ACMGenericCodec {
- public:
- explicit ACMOpus(int16_t codec_id);
- virtual ~ACMOpus();
-
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual int16_t SetBitRateSafe(const int32_t rate) OVERRIDE;
-
- virtual bool IsTrueStereoCodec() OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-
- WebRtcOpusEncInst* encoder_inst_ptr_;
- WebRtcOpusDecInst* decoder_inst_ptr_;
- uint16_t sample_freq_;
- uint32_t bitrate_;
- int channels_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc
deleted file mode 100644
index 6fe12f757ca..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc
+++ /dev/null
@@ -1,251 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_pcm16b.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_PCM16
-#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_PCM16
-
-ACMPCM16B::ACMPCM16B(int16_t /* codec_id */) {
- return;
-}
-
-ACMPCM16B::~ACMPCM16B() {
- return;
-}
-
-int16_t ACMPCM16B::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMPCM16B::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMPCM16B::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMPCM16B::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMPCM16B::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMPCM16B::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMPCM16B::InternalCreateEncoder() {
- return -1;
-}
-
-int16_t ACMPCM16B::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMPCM16B::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-void ACMPCM16B::DestructEncoderSafe() {
- return;
-}
-
-void ACMPCM16B::DestructDecoderSafe() {
- return;
-}
-
-void ACMPCM16B::SplitStereoPacket(uint8_t* /*payload*/,
- int32_t* /*payload_length*/) {
-}
-
-#else //===================== Actual Implementation =======================
-ACMPCM16B::ACMPCM16B(int16_t codec_id) {
- codec_id_ = codec_id;
- sampling_freq_hz_ = ACMCodecDB::CodecFreq(codec_id_);
-}
-
-ACMPCM16B::~ACMPCM16B() {
- return;
-}
-
-int16_t ACMPCM16B::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = WebRtcPcm16b_Encode(&in_audio_[in_audio_ix_read_],
- frame_len_smpl_ * num_channels_,
- bitstream);
- // Increment the read index to tell the caller that how far
- // we have gone forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_ * num_channels_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMPCM16B::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMPCM16B::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCM16B::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int32_t ACMPCM16B::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- // Fill up the structure by calling "SET_CODEC_PAR" & "SET_PCMU_FUNCTION".
- // Then call NetEQ to add the codec to it's database.
- if (codec_inst.channels == 1) {
- switch (sampling_freq_hz_) {
- case 8000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16B, codec_inst.pltype, NULL, 8000);
- SET_PCM16B_FUNCTIONS(codec_def);
- break;
- }
- case 16000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16Bwb, codec_inst.pltype, NULL,
- 16000);
- SET_PCM16B_WB_FUNCTIONS(codec_def);
- break;
- }
- case 32000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16Bswb32kHz, codec_inst.pltype,
- NULL, 32000);
- SET_PCM16B_SWB32_FUNCTIONS(codec_def);
- break;
- }
- default: {
- return -1;
- }
- }
- } else {
- switch (sampling_freq_hz_) {
- case 8000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16B_2ch, codec_inst.pltype, NULL,
- 8000);
- SET_PCM16B_FUNCTIONS(codec_def);
- break;
- }
- case 16000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16Bwb_2ch, codec_inst.pltype,
- NULL, 16000);
- SET_PCM16B_WB_FUNCTIONS(codec_def);
- break;
- }
- case 32000: {
- SET_CODEC_PAR(codec_def, kDecoderPCM16Bswb32kHz_2ch, codec_inst.pltype,
- NULL, 32000);
- SET_PCM16B_SWB32_FUNCTIONS(codec_def);
- break;
- }
- default: {
- return -1;
- }
- }
- }
- return 0;
-}
-
-ACMGenericCodec* ACMPCM16B::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMPCM16B::InternalCreateEncoder() {
- // PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCM16B::InternalCreateDecoder() {
- // PCM has no instance.
- return 0;
-}
-
-void ACMPCM16B::InternalDestructEncoderInst(void* /* ptr_inst */) {
- // PCM has no instance.
- return;
-}
-
-void ACMPCM16B::DestructEncoderSafe() {
- // PCM has no instance.
- encoder_exist_ = false;
- encoder_initialized_ = false;
- return;
-}
-
-void ACMPCM16B::DestructDecoderSafe() {
- // PCM has no instance.
- decoder_exist_ = false;
- decoder_initialized_ = false;
- return;
-}
-
-// Split the stereo packet and place left and right channel after each other
-// in the payload vector.
-void ACMPCM16B::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- uint8_t right_byte_msb;
- uint8_t right_byte_lsb;
-
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
- // Move two bytes representing right channel each loop, and place it at the
- // end of the bytestream vector. After looping the data is reordered to:
- // l1 l2 l3 l4 ... l(N-1) lN r1 r2 r3 r4 ... r(N-1) r(N),
- // where N is the total number of samples.
-
- for (int i = 0; i < *payload_length / 2; i += 2) {
- right_byte_msb = payload[i + 2];
- right_byte_lsb = payload[i + 3];
- memmove(&payload[i + 2], &payload[i + 4], *payload_length - i - 4);
- payload[*payload_length - 2] = right_byte_msb;
- payload[*payload_length - 1] = right_byte_lsb;
- }
-}
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.h
deleted file mode 100644
index a97589b57a9..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcm16b.h
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMPCM16B : public ACMGenericCodec {
- public:
- explicit ACMPCM16B(int16_t codec_id);
- virtual ~ACMPCM16B();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-
- int32_t sampling_freq_hz_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.cc
deleted file mode 100644
index 9e5514a9e9e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.cc
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_pcma.h"
-
-#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-// Codec interface
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMPCMA::ACMPCMA(int16_t codec_id) {
- codec_id_ = codec_id;
-}
-
-ACMPCMA::~ACMPCMA() {
- return;
-}
-
-int16_t ACMPCMA::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = WebRtcG711_EncodeA(NULL, &in_audio_[in_audio_ix_read_],
- frame_len_smpl_ * num_channels_,
- (int16_t*) bitstream);
- // Increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_ * num_channels_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMPCMA::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMPCMA::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCMA::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int32_t ACMPCMA::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_PCMA_FUNCTION."
- // Then call NetEQ to add the codec to it's database.
- if (codec_inst.channels == 1) {
- // Mono mode.
- SET_CODEC_PAR(codec_def, kDecoderPCMa, codec_inst.pltype, NULL, 8000);
- } else {
- // Stereo mode.
- SET_CODEC_PAR(codec_def, kDecoderPCMa_2ch, codec_inst.pltype, NULL, 8000);
- }
- SET_PCMA_FUNCTIONS(codec_def);
- return 0;
-}
-
-ACMGenericCodec* ACMPCMA::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMPCMA::InternalCreateEncoder() {
- // PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCMA::InternalCreateDecoder() {
- // PCM has no instance.
- return 0;
-}
-
-void ACMPCMA::InternalDestructEncoderInst(void* /* ptr_inst */) {
- // PCM has no instance.
- return;
-}
-
-void ACMPCMA::DestructEncoderSafe() {
- // PCM has no instance.
- return;
-}
-
-void ACMPCMA::DestructDecoderSafe() {
- // PCM has no instance.
- decoder_initialized_ = false;
- decoder_exist_ = false;
- return;
-}
-
-// Split the stereo packet and place left and right channel after each other
-// in the payload vector.
-void ACMPCMA::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- uint8_t right_byte;
-
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
- // Move one bytes representing right channel each loop, and place it at the
- // end of the bytestream vector. After looping the data is reordered to:
- // l1 l2 l3 l4 ... l(N-1) lN r1 r2 r3 r4 ... r(N-1) r(N),
- // where N is the total number of samples.
- for (int i = 0; i < *payload_length / 2; i++) {
- right_byte = payload[i + 1];
- memmove(&payload[i + 1], &payload[i + 2], *payload_length - i - 2);
- payload[*payload_length - 1] = right_byte;
- }
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.h
deleted file mode 100644
index cb506eaa6e9..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcma.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMPCMA : public ACMGenericCodec {
- public:
- explicit ACMPCMA(int16_t codec_id);
- virtual ~ACMPCMA();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.cc
deleted file mode 100644
index 6f4eb27aa7a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.cc
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_pcmu.h"
-
-#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-// Codec interface
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMPCMU::ACMPCMU(int16_t codec_id) {
- codec_id_ = codec_id;
-}
-
-ACMPCMU::~ACMPCMU() {
- return;
-}
-
-int16_t ACMPCMU::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- *bitstream_len_byte = WebRtcG711_EncodeU(NULL, &in_audio_[in_audio_ix_read_],
- frame_len_smpl_ * num_channels_,
- (int16_t*)bitstream);
- // Increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer.
- in_audio_ix_read_ += frame_len_smpl_ * num_channels_;
- return *bitstream_len_byte;
-}
-
-int16_t ACMPCMU::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMPCMU::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCMU::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization, PCM has no instance.
- return 0;
-}
-
-int32_t ACMPCMU::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_PCMU_FUNCTION."
- // Then call NetEQ to add the codec to it's database.
- if (codec_inst.channels == 1) {
- // Mono mode.
- SET_CODEC_PAR(codec_def, kDecoderPCMu, codec_inst.pltype, NULL, 8000);
- } else {
- // Stereo mode.
- SET_CODEC_PAR(codec_def, kDecoderPCMu_2ch, codec_inst.pltype, NULL, 8000);
- }
- SET_PCMU_FUNCTIONS(codec_def);
- return 0;
-}
-
-ACMGenericCodec* ACMPCMU::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMPCMU::InternalCreateEncoder() {
- // PCM has no instance.
- return 0;
-}
-
-int16_t ACMPCMU::InternalCreateDecoder() {
- // PCM has no instance.
- return 0;
-}
-
-void ACMPCMU::InternalDestructEncoderInst(void* /* ptr_inst */) {
- // PCM has no instance.
- return;
-}
-
-void ACMPCMU::DestructEncoderSafe() {
- // PCM has no instance.
- encoder_exist_ = false;
- encoder_initialized_ = false;
- return;
-}
-
-void ACMPCMU::DestructDecoderSafe() {
- // PCM has no instance.
- decoder_initialized_ = false;
- decoder_exist_ = false;
- return;
-}
-
-// Split the stereo packet and place left and right channel after each other
-// in the payload vector.
-void ACMPCMU::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
- uint8_t right_byte;
-
- // Check for valid inputs.
- assert(payload != NULL);
- assert(*payload_length > 0);
-
- // Move one bytes representing right channel each loop, and place it at the
- // end of the bytestream vector. After looping the data is reordered to:
- // l1 l2 l3 l4 ... l(N-1) lN r1 r2 r3 r4 ... r(N-1) r(N),
- // where N is the total number of samples.
- for (int i = 0; i < *payload_length / 2; i++) {
- right_byte = payload[i + 1];
- memmove(&payload[i + 1], &payload[i + 2], *payload_length - i - 2);
- payload[*payload_length - 1] = right_byte;
- }
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.h
deleted file mode 100644
index ea401d59c96..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_pcmu.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMPCMU : public ACMGenericCodec {
- public:
- explicit ACMPCMU(int16_t codec_id);
- virtual ~ACMPCMU();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-
- virtual void SplitStereoPacket(uint8_t* payload,
- int32_t* payload_length) OVERRIDE;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.cc
deleted file mode 100644
index 0d8134c171a..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.cc
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_red.h"
-
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMRED::ACMRED(int16_t codec_id) {
- codec_id_ = codec_id;
-}
-
-ACMRED::~ACMRED() {
- return;
-}
-
-int16_t ACMRED::InternalEncode(uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- // RED is never used as an encoder
- // RED has no instance
- return 0;
-}
-
-int16_t ACMRED::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMRED::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization,
- // RED has no instance
- return 0;
-}
-
-int16_t ACMRED::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- // This codec does not need initialization,
- // RED has no instance
- return 0;
-}
-
-int32_t ACMRED::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- // Todo:
- // log error
- return -1;
- }
-
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_PCMU_FUNCTION."
- // Then call NetEQ to add the codec to it's
- // database.
- SET_CODEC_PAR((codec_def), kDecoderRED, codec_inst.pltype, NULL, 8000);
- SET_RED_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMRED::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMRED::InternalCreateEncoder() {
- // RED has no instance
- return 0;
-}
-
-int16_t ACMRED::InternalCreateDecoder() {
- // RED has no instance
- return 0;
-}
-
-void ACMRED::InternalDestructEncoderInst(void* /* ptr_inst */) {
- // RED has no instance
- return;
-}
-
-void ACMRED::DestructEncoderSafe() {
- // RED has no instance
- return;
-}
-
-void ACMRED::DestructDecoderSafe() {
- // RED has no instance
- return;
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.h
deleted file mode 100644
index ede18b5218e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_red.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMRED : public ACMGenericCodec {
- public:
- explicit ACMRED(int16_t codec_id);
- virtual ~ACMRED();
-
- // for FEC
- virtual ACMGenericCodec* CreateInstance(void) OVERRIDE;
-
- virtual int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) OVERRIDE;
-
- virtual int16_t InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- virtual int16_t InternalInitDecoder(
- WebRtcACMCodecParams* codec_params) OVERRIDE;
-
- protected:
- virtual int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type) OVERRIDE;
-
- virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) OVERRIDE;
-
- virtual void DestructEncoderSafe() OVERRIDE;
-
- virtual void DestructDecoderSafe() OVERRIDE;
-
- virtual int16_t InternalCreateEncoder() OVERRIDE;
-
- virtual int16_t InternalCreateDecoder() OVERRIDE;
-
- virtual void InternalDestructEncoderInst(void* ptr_inst) OVERRIDE;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.cc
deleted file mode 100644
index 50ddab1d8b9..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.cc
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_resampler.h"
-
-#include <string.h>
-
-#include "webrtc/common_audio/resampler/include/push_resampler.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-ACMResampler::ACMResampler() {
-}
-
-ACMResampler::~ACMResampler() {
-}
-
-int16_t ACMResampler::Resample10Msec(const int16_t* in_audio,
- int32_t in_freq_hz,
- int16_t* out_audio,
- int32_t out_freq_hz,
- uint8_t num_audio_channels) {
- if (in_freq_hz == out_freq_hz) {
- size_t length = static_cast<size_t>(in_freq_hz * num_audio_channels / 100);
- memcpy(out_audio, in_audio, length * sizeof(int16_t));
- return static_cast<int16_t>(in_freq_hz / 100);
- }
-
- // |max_length| is the maximum number of samples for 10ms at 48kHz.
- // TODO(turajs): is this actually the capacity of the |out_audio| buffer?
- int max_length = 480 * num_audio_channels;
- int in_length = in_freq_hz / 100 * num_audio_channels;
-
- if (resampler_.InitializeIfNeeded(in_freq_hz, out_freq_hz,
- num_audio_channels) != 0) {
- LOG_FERR3(LS_ERROR, InitializeIfNeeded, in_freq_hz, out_freq_hz,
- num_audio_channels);
- return -1;
- }
-
- int out_length = resampler_.Resample(in_audio, in_length, out_audio,
- max_length);
- if (out_length == -1) {
- LOG_FERR4(LS_ERROR, Resample, in_audio, in_length, out_audio, max_length);
- return -1;
- }
-
- return out_length / num_audio_channels;
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.h
deleted file mode 100644
index b50e722c443..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_resampler.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
-
-#include "webrtc/common_audio/resampler/include/push_resampler.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMResampler {
- public:
- ACMResampler();
- ~ACMResampler();
-
- int16_t Resample10Msec(const int16_t* in_audio,
- const int32_t in_freq_hz,
- int16_t* out_audio,
- const int32_t out_freq_hz,
- uint8_t num_audio_channels);
-
- private:
- PushResampler resampler_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.cc
deleted file mode 100644
index 1567929d860..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.cc
+++ /dev/null
@@ -1,471 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/acm_speex.h"
-
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-#ifdef WEBRTC_CODEC_SPEEX
-// NOTE! Speex is not included in the open-source package. Modify this file or
-// your codec API to match the function calls and names of used Speex API file.
-#include "speex_interface.h"
-#endif
-
-namespace webrtc {
-
-namespace acm1 {
-
-#ifndef WEBRTC_CODEC_SPEEX
-ACMSPEEX::ACMSPEEX(int16_t /* codec_id */)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL),
- compl_mode_(0),
- vbr_enabled_(false),
- encoding_rate_(-1),
- sampling_frequency_(-1),
- samples_in_20ms_audio_(-1) {
- return;
-}
-
-ACMSPEEX::~ACMSPEEX() {
- return;
-}
-
-int16_t ACMSPEEX::InternalEncode(
- uint8_t* /* bitstream */,
- int16_t* /* bitstream_len_byte */) {
- return -1;
-}
-
-int16_t ACMSPEEX::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return -1;
-}
-
-int16_t ACMSPEEX::EnableDTX() {
- return -1;
-}
-
-int16_t ACMSPEEX::DisableDTX() {
- return -1;
-}
-
-int16_t ACMSPEEX::InternalInitEncoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int16_t ACMSPEEX::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- return -1;
-}
-
-int32_t ACMSPEEX::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
- return -1;
-}
-
-ACMGenericCodec* ACMSPEEX::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMSPEEX::InternalCreateEncoder() {
- return -1;
-}
-
-void ACMSPEEX::DestructEncoderSafe() {
- return;
-}
-
-int16_t ACMSPEEX::InternalCreateDecoder() {
- return -1;
-}
-
-void ACMSPEEX::DestructDecoderSafe() {
- return;
-}
-
-int16_t ACMSPEEX::SetBitRateSafe(const int32_t /* rate */) {
- return -1;
-}
-
-void ACMSPEEX::InternalDestructEncoderInst(void* /* ptr_inst */) {
- return;
-}
-
-#ifdef UNUSEDSPEEX
-int16_t ACMSPEEX::EnableVBR() {
- return -1;
-}
-
-int16_t ACMSPEEX::DisableVBR() {
- return -1;
-}
-
-int16_t ACMSPEEX::SetComplMode(int16_t mode) {
- return -1;
-}
-#endif
-
-#else //===================== Actual Implementation =======================
-
-ACMSPEEX::ACMSPEEX(int16_t codec_id)
- : encoder_inst_ptr_(NULL),
- decoder_inst_ptr_(NULL) {
- codec_id_ = codec_id;
-
- // Set sampling frequency, frame size and rate Speex
- if (codec_id_ == ACMCodecDB::kSPEEX8) {
- sampling_frequency_ = 8000;
- samples_in_20ms_audio_ = 160;
- encoding_rate_ = 11000;
- } else if (codec_id_ == ACMCodecDB::kSPEEX16) {
- sampling_frequency_ = 16000;
- samples_in_20ms_audio_ = 320;
- encoding_rate_ = 22000;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Wrong codec id for Speex.");
-
- sampling_frequency_ = -1;
- samples_in_20ms_audio_ = -1;
- encoding_rate_ = -1;
- }
-
- has_internal_dtx_ = true;
- dtx_enabled_ = false;
- vbr_enabled_ = false;
- compl_mode_ = 3; // default complexity value
-
- return;
-}
-
-ACMSPEEX::~ACMSPEEX() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcSpeex_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- if (decoder_inst_ptr_ != NULL) {
- WebRtcSpeex_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- return;
-}
-
-int16_t ACMSPEEX::InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte) {
- int16_t status;
- int16_t num_encoded_samples = 0;
- int16_t n = 0;
-
- while (num_encoded_samples < frame_len_smpl_) {
- status = WebRtcSpeex_Encode(encoder_inst_ptr_,
- &in_audio_[in_audio_ix_read_], encoding_rate_);
-
- // increment the read index this tell the caller that how far
- // we have gone forward in reading the audio buffer
- in_audio_ix_read_ += samples_in_20ms_audio_;
- num_encoded_samples += samples_in_20ms_audio_;
-
- if (status < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error in Speex encoder");
- return status;
- }
-
- // Update VAD, if internal DTX is used
- if (has_internal_dtx_ && dtx_enabled_) {
- vad_label_[n++] = status;
- vad_label_[n++] = status;
- }
-
- if (status == 0) {
- // This frame is detected as inactive. We need send whatever
- // encoded so far.
- *bitstream_len_byte = WebRtcSpeex_GetBitstream(encoder_inst_ptr_,
- (int16_t*)bitstream);
- return *bitstream_len_byte;
- }
- }
-
- *bitstream_len_byte = WebRtcSpeex_GetBitstream(encoder_inst_ptr_,
- (int16_t*)bitstream);
- return *bitstream_len_byte;
-}
-
-int16_t ACMSPEEX::DecodeSafe(uint8_t* /* bitstream */,
- int16_t /* bitstream_len_byte */,
- int16_t* /* audio */,
- int16_t* /* audio_samples */,
- int8_t* /* speech_type */) {
- return 0;
-}
-
-int16_t ACMSPEEX::EnableDTX() {
- if (dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exist
- // enable DTX
- if (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, (vbr_enabled_ ? 1 : 0),
- compl_mode_, 1) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot enable DTX for Speex");
- return -1;
- }
- dtx_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-
- return 0;
-}
-
-int16_t ACMSPEEX::DisableDTX() {
- if (!dtx_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exist
- // disable DTX
- if (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, (vbr_enabled_ ? 1 : 0),
- compl_mode_, 0) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot disable DTX for Speex");
- return -1;
- }
- dtx_enabled_ = false;
- return 0;
- } else {
- // encoder doesn't exists, therefore disabling is harmless
- return 0;
- }
-
- return 0;
-}
-
-int16_t ACMSPEEX::InternalInitEncoder(
- WebRtcACMCodecParams* codec_params) {
- // sanity check
- if (encoder_inst_ptr_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot initialize Speex encoder, instance does not exist");
- return -1;
- }
-
- int16_t status = SetBitRateSafe((codec_params->codecInstant).rate);
- status +=
- (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, vbr_enabled_, compl_mode_,
- ((codec_params->enable_dtx) ? 1 : 0)) < 0) ?
- -1 : 0;
-
- if (status >= 0) {
- return 0;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error in initialization of Speex encoder");
- return -1;
- }
-}
-
-int16_t ACMSPEEX::InternalInitDecoder(
- WebRtcACMCodecParams* /* codec_params */) {
- int16_t status;
-
- // sanity check
- if (decoder_inst_ptr_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot initialize Speex decoder, instance does not exist");
- return -1;
- }
- status = ((WebRtcSpeex_DecoderInit(decoder_inst_ptr_) < 0) ? -1 : 0);
-
- if (status >= 0) {
- return 0;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error in initialization of Speex decoder");
- return -1;
- }
-}
-
-int32_t ACMSPEEX::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
- if (!decoder_initialized_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error, Speex decoder is not initialized");
- return -1;
- }
-
- // Fill up the structure by calling
- // "SET_CODEC_PAR" & "SET_SPEEX_FUNCTION."
- // Then call NetEQ to add the codec to its
- // database.
-
- switch (sampling_frequency_) {
- case 8000: {
- SET_CODEC_PAR((codec_def), kDecoderSPEEX_8, codec_inst.pltype,
- decoder_inst_ptr_, 8000);
- break;
- }
- case 16000: {
- SET_CODEC_PAR((codec_def), kDecoderSPEEX_16, codec_inst.pltype,
- decoder_inst_ptr_, 16000);
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Unsupported sampling frequency for Speex");
-
- return -1;
- }
- }
-
- SET_SPEEX_FUNCTIONS((codec_def));
- return 0;
-}
-
-ACMGenericCodec* ACMSPEEX::CreateInstance(void) {
- return NULL;
-}
-
-int16_t ACMSPEEX::InternalCreateEncoder() {
- return WebRtcSpeex_CreateEnc(&encoder_inst_ptr_, sampling_frequency_);
-}
-
-void ACMSPEEX::DestructEncoderSafe() {
- if (encoder_inst_ptr_ != NULL) {
- WebRtcSpeex_FreeEnc(encoder_inst_ptr_);
- encoder_inst_ptr_ = NULL;
- }
- // there is no encoder set the following
- encoder_exist_ = false;
- encoder_initialized_ = false;
- encoding_rate_ = 0;
-}
-
-int16_t ACMSPEEX::InternalCreateDecoder() {
- return WebRtcSpeex_CreateDec(&decoder_inst_ptr_, sampling_frequency_, 1);
-}
-
-void ACMSPEEX::DestructDecoderSafe() {
- if (decoder_inst_ptr_ != NULL) {
- WebRtcSpeex_FreeDec(decoder_inst_ptr_);
- decoder_inst_ptr_ = NULL;
- }
- // there is no encoder instance set the followings
- decoder_exist_ = false;
- decoder_initialized_ = false;
-}
-
-int16_t ACMSPEEX::SetBitRateSafe(const int32_t rate) {
- // Check if changed rate
- if (rate == encoding_rate_) {
- return 0;
- } else if (rate > 2000) {
- encoding_rate_ = rate;
- encoder_params_.codecInstant.rate = rate;
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Unsupported encoding rate for Speex");
-
- return -1;
- }
-
- return 0;
-}
-
-void ACMSPEEX::InternalDestructEncoderInst(void* ptr_inst) {
- if (ptr_inst != NULL) {
- WebRtcSpeex_FreeEnc((SPEEX_encinst_t_*) ptr_inst);
- }
- return;
-}
-
-#ifdef UNUSEDSPEEX
-
-// This API is currently not in use. If requested to be able to enable/disable
-// VBR an ACM API need to be added.
-int16_t ACMSPEEX::EnableVBR() {
- if (vbr_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exist
- // enable Variable Bit Rate (VBR)
- if (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, 1, compl_mode_,
- (dtx_enabled_ ? 1 : 0)) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot enable VBR mode for Speex");
-
- return -1;
- }
- vbr_enabled_ = true;
- return 0;
- } else {
- return -1;
- }
-}
-
-// This API is currently not in use. If requested to be able to enable/disable
-// VBR an ACM API need to be added.
-int16_t ACMSPEEX::DisableVBR() {
- if (!vbr_enabled_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exist
- // disable DTX
- if (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, 0, compl_mode_,
- (dtx_enabled_ ? 1 : 0)) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Cannot disable DTX for Speex");
-
- return -1;
- }
- vbr_enabled_ = false;
- return 0;
- } else {
- // encoder doesn't exists, therefore disabling is harmless
- return 0;
- }
-}
-
-// This API is currently not in use. If requested to be able to set complexity
-// an ACM API need to be added.
-int16_t ACMSPEEX::SetComplMode(int16_t mode) {
- // Check if new mode
- if (mode == compl_mode_) {
- return 0;
- } else if (encoder_exist_) { // check if encoder exist
- // Set new mode
- if (WebRtcSpeex_EncoderInit(encoder_inst_ptr_, 0, mode,
- (dtx_enabled_ ? 1 : 0)) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
- "Error in complexity mode for Speex");
- return -1;
- }
- compl_mode_ = mode;
- return 0;
- } else {
- // encoder doesn't exists, therefore disabling is harmless
- return 0;
- }
-}
-
-#endif
-
-#endif
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.h
deleted file mode 100644
index 762aea8d9c2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/acm_speex.h
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
-
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-
-// forward declaration
-struct SPEEX_encinst_t_;
-struct SPEEX_decinst_t_;
-
-namespace webrtc {
-
-namespace acm1 {
-
-class ACMSPEEX : public ACMGenericCodec {
- public:
- explicit ACMSPEEX(int16_t codec_id);
- ~ACMSPEEX();
-
- // for FEC
- ACMGenericCodec* CreateInstance(void);
-
- int16_t InternalEncode(uint8_t* bitstream,
- int16_t* bitstream_len_byte);
-
- int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
-
- int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
-
- protected:
- int16_t DecodeSafe(uint8_t* bitstream,
- int16_t bitstream_len_byte,
- int16_t* audio,
- int16_t* audio_samples,
- int8_t* speech_type);
-
- int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
-
- void DestructEncoderSafe();
-
- void DestructDecoderSafe();
-
- int16_t InternalCreateEncoder();
-
- int16_t InternalCreateDecoder();
-
- void InternalDestructEncoderInst(void* ptr_inst);
-
- int16_t SetBitRateSafe(const int32_t rate);
-
- int16_t EnableDTX();
-
- int16_t DisableDTX();
-
-#ifdef UNUSEDSPEEX
- int16_t EnableVBR();
-
- int16_t DisableVBR();
-
- int16_t SetComplMode(int16_t mode);
-#endif
-
- SPEEX_encinst_t_* encoder_inst_ptr_;
- SPEEX_decinst_t_* decoder_inst_ptr_;
- int16_t compl_mode_;
- bool vbr_enabled_;
- int32_t encoding_rate_;
- int16_t sampling_frequency_;
- uint16_t samples_in_20ms_audio_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi b/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi
deleted file mode 100644
index a0389b03ef8..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi
+++ /dev/null
@@ -1,153 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'variables': {
- 'audio_coding_dependencies': [
- 'CNG',
- 'G711',
- 'G722',
- 'iLBC',
- 'iSAC',
- 'iSACFix',
- 'PCM16B',
- 'NetEq',
- '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
- '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
- ],
- 'audio_coding_defines': [],
- 'conditions': [
- ['include_opus==1', {
- 'audio_coding_dependencies': ['webrtc_opus',],
- 'audio_coding_defines': ['WEBRTC_CODEC_OPUS',],
- }],
- ],
- },
- 'targets': [
- {
- 'target_name': 'audio_coding_module',
- 'type': 'static_library',
- 'defines': [
- '<@(audio_coding_defines)',
- ],
- 'dependencies': [
- '<@(audio_coding_dependencies)',
- 'acm2',
- ],
- 'include_dirs': [
- '../interface',
- '../../../interface',
- '<(webrtc_root)',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- '../interface',
- '../../../interface',
- '<(webrtc_root)',
- ],
- },
- 'sources': [
- '../interface/audio_coding_module.h',
- '../interface/audio_coding_module_typedefs.h',
- 'acm_amr.cc',
- 'acm_amr.h',
- 'acm_amrwb.cc',
- 'acm_amrwb.h',
- 'acm_celt.cc',
- 'acm_celt.h',
- 'acm_cng.cc',
- 'acm_cng.h',
- 'acm_codec_database.cc',
- 'acm_codec_database.h',
- 'acm_dtmf_detection.cc',
- 'acm_dtmf_detection.h',
- 'acm_dtmf_playout.cc',
- 'acm_dtmf_playout.h',
- 'acm_g722.cc',
- 'acm_g722.h',
- 'acm_g7221.cc',
- 'acm_g7221.h',
- 'acm_g7221c.cc',
- 'acm_g7221c.h',
- 'acm_g729.cc',
- 'acm_g729.h',
- 'acm_g7291.cc',
- 'acm_g7291.h',
- 'acm_generic_codec.cc',
- 'acm_generic_codec.h',
- 'acm_gsmfr.cc',
- 'acm_gsmfr.h',
- 'acm_ilbc.cc',
- 'acm_ilbc.h',
- 'acm_isac.cc',
- 'acm_isac.h',
- 'acm_isac_macros.h',
- 'acm_neteq.cc',
- 'acm_neteq.h',
- 'acm_opus.cc',
- 'acm_opus.h',
- 'acm_speex.cc',
- 'acm_speex.h',
- 'acm_pcm16b.cc',
- 'acm_pcm16b.h',
- 'acm_pcma.cc',
- 'acm_pcma.h',
- 'acm_pcmu.cc',
- 'acm_pcmu.h',
- 'acm_red.cc',
- 'acm_red.h',
- 'acm_resampler.cc',
- 'acm_resampler.h',
- 'audio_coding_module_impl.cc',
- 'audio_coding_module_impl.h',
- ],
- },
- ],
- 'conditions': [
- ['include_tests==1', {
- 'targets': [
- {
- 'target_name': 'delay_test',
- 'type': 'executable',
- 'dependencies': [
- 'audio_coding_module',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/test/test.gyp:test_support',
- '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- ],
- 'sources': [
- '../test/delay_test.cc',
- '../test/Channel.cc',
- '../test/PCMFile.cc',
- '../test/utility.cc',
- ],
- }, # delay_test
- {
- 'target_name': 'insert_packet_with_timing',
- 'type': 'executable',
- 'dependencies': [
- 'audio_coding_module',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/test/test.gyp:test_support',
- '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- ],
- 'sources': [
- '../test/insert_packet_with_timing.cc',
- '../test/Channel.cc',
- '../test/PCMFile.cc',
- ],
- }, # delay_test
- ],
- }],
- ],
- 'includes': [
- '../acm2/audio_coding_module.gypi',
- ],
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc
deleted file mode 100644
index 556f530ecfd..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc
+++ /dev/null
@@ -1,3048 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h"
-
-#include <assert.h>
-#include <stdlib.h>
-
-#include <algorithm> // For std::max.
-
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
-#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
-#include "webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h"
-#include "webrtc/modules/audio_coding/main/source/acm_generic_codec.h"
-#include "webrtc/modules/audio_coding/main/source/acm_resampler.h"
-#include "webrtc/modules/audio_coding/main/acm2/nack.h"
-#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/system_wrappers/interface/trace_event.h"
-
-namespace webrtc {
-
-namespace acm1 {
-
-enum {
- kACMToneEnd = 999
-};
-
-// Maximum number of bytes in one packet (PCM16B, 20 ms packets, stereo).
-enum {
- kMaxPacketSize = 2560
-};
-
-// Maximum number of payloads that can be packed in one RED payload. For
-// regular FEC, we only pack two payloads. In case of dual-streaming, in worst
-// case we might pack 3 payloads in one RED payload.
-enum {
- kNumFecFragmentationVectors = 2,
- kMaxNumFragmentationVectors = 3
-};
-
-static const uint32_t kMaskTimestamp = 0x03ffffff;
-static const int kDefaultTimestampDiff = 960; // 20 ms @ 48 kHz.
-
-// If packet N is arrived all packets prior to N - |kNackThresholdPackets| which
-// are not received are considered as lost, and appear in NACK list.
-static const int kNackThresholdPackets = 2;
-
-namespace {
-
-bool IsCodecRED(const CodecInst* codec) {
- return (STR_CASE_CMP(codec->plname, "RED") == 0);
-}
-
-bool IsCodecRED(int index) {
- return (IsCodecRED(&ACMCodecDB::database_[index]));
-}
-
-bool IsCodecCN(const CodecInst* codec) {
- return (STR_CASE_CMP(codec->plname, "CN") == 0);
-}
-
-bool IsCodecCN(int index) {
- return (IsCodecCN(&ACMCodecDB::database_[index]));
-}
-
-// Stereo-to-mono can be used as in-place.
-int DownMix(const AudioFrame& frame, int length_out_buff, int16_t* out_buff) {
- if (length_out_buff < frame.samples_per_channel_) {
- return -1;
- }
- for (int n = 0; n < frame.samples_per_channel_; ++n)
- out_buff[n] = (frame.data_[2 * n] + frame.data_[2 * n + 1]) >> 1;
- return 0;
-}
-
-// Mono-to-stereo can be used as in-place.
-int UpMix(const AudioFrame& frame, int length_out_buff, int16_t* out_buff) {
- if (length_out_buff < frame.samples_per_channel_) {
- return -1;
- }
- for (int n = frame.samples_per_channel_ - 1; n >= 0; --n) {
- out_buff[2 * n + 1] = frame.data_[n];
- out_buff[2 * n] = frame.data_[n];
- }
- return 0;
-}
-
-// Return 1 if timestamp t1 is less than timestamp t2, while compensating for
-// wrap-around.
-int TimestampLessThan(uint32_t t1, uint32_t t2) {
- uint32_t kHalfFullRange = static_cast<uint32_t>(0xFFFFFFFF) / 2;
- if (t1 == t2) {
- return 0;
- } else if (t1 < t2) {
- if (t2 - t1 < kHalfFullRange)
- return 1;
- return 0;
- } else {
- if (t1 - t2 < kHalfFullRange)
- return 0;
- return 1;
- }
-}
-
-} // namespace
-
-AudioCodingModuleImpl::AudioCodingModuleImpl(const int32_t id, Clock* clock)
- : packetization_callback_(NULL),
- id_(id),
- last_timestamp_(0xD87F3F9F),
- last_in_timestamp_(0xD87F3F9F),
- send_codec_inst_(),
- cng_nb_pltype_(255),
- cng_wb_pltype_(255),
- cng_swb_pltype_(255),
- cng_fb_pltype_(255),
- red_pltype_(255),
- vad_enabled_(false),
- dtx_enabled_(false),
- vad_mode_(VADNormal),
- stereo_receive_registered_(false),
- stereo_send_(false),
- prev_received_channel_(0),
- expected_channels_(1),
- current_send_codec_idx_(-1),
- current_receive_codec_idx_(-1),
- send_codec_registered_(false),
- acm_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vad_callback_(NULL),
- last_recv_audio_codec_pltype_(255),
- is_first_red_(true),
- fec_enabled_(false),
- last_fec_timestamp_(0),
- receive_red_pltype_(255),
- previous_pltype_(255),
- dummy_rtp_header_(NULL),
- recv_pl_frame_size_smpls_(0),
- receiver_initialized_(false),
- dtmf_detector_(NULL),
- dtmf_callback_(NULL),
- last_detected_tone_(kACMToneEnd),
- callback_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- secondary_send_codec_inst_(),
- initial_delay_ms_(0),
- num_packets_accumulated_(0),
- num_bytes_accumulated_(0),
- accumulated_audio_ms_(0),
- first_payload_received_(false),
- last_incoming_send_timestamp_(0),
- track_neteq_buffer_(false),
- playout_ts_(0),
- av_sync_(false),
- last_timestamp_diff_(kDefaultTimestampDiff),
- last_sequence_number_(0),
- last_ssrc_(0),
- last_packet_was_sync_(false),
- clock_(clock),
- nack_(),
- nack_enabled_(false) {
-
- // Nullify send codec memory, set payload type and set codec name to
- // invalid values.
- const char no_name[] = "noCodecRegistered";
- strncpy(send_codec_inst_.plname, no_name, RTP_PAYLOAD_NAME_SIZE - 1);
- send_codec_inst_.pltype = -1;
-
- strncpy(secondary_send_codec_inst_.plname, no_name,
- RTP_PAYLOAD_NAME_SIZE - 1);
- secondary_send_codec_inst_.pltype = -1;
-
- for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
- codecs_[i] = NULL;
- registered_pltypes_[i] = -1;
- stereo_receive_[i] = false;
- slave_codecs_[i] = NULL;
- mirror_codec_idx_[i] = -1;
- }
-
- neteq_.set_id(id_);
-
- // Allocate memory for RED.
- red_buffer_ = new uint8_t[MAX_PAYLOAD_SIZE_BYTE];
-
- // TODO(turajs): This might not be exactly how this class is supposed to work.
- // The external usage might be that |fragmentationVectorSize| has to match
- // the allocated space for the member-arrays, while here, we allocate
- // according to the maximum number of fragmentations and change
- // |fragmentationVectorSize| on-the-fly based on actual number of
- // fragmentations. However, due to copying to local variable before calling
- // SendData, the RTP module receives a "valid" fragmentation, where allocated
- // space matches |fragmentationVectorSize|, therefore, this should not cause
- // any problem. A better approach is not using RTPFragmentationHeader as
- // member variable, instead, use an ACM-specific structure to hold RED-related
- // data. See module_common_type.h for the definition of
- // RTPFragmentationHeader.
- fragmentation_.VerifyAndAllocateFragmentationHeader(
- kMaxNumFragmentationVectors);
-
- // Register the default payload type for RED and for CNG at sampling rates of
- // 8, 16, 32 and 48 kHz.
- for (int i = (ACMCodecDB::kNumCodecs - 1); i >= 0; i--) {
- if (IsCodecRED(i)) {
- red_pltype_ = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
- } else if (IsCodecCN(i)) {
- if (ACMCodecDB::database_[i].plfreq == 8000) {
- cng_nb_pltype_ = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
- } else if (ACMCodecDB::database_[i].plfreq == 16000) {
- cng_wb_pltype_ = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
- } else if (ACMCodecDB::database_[i].plfreq == 32000) {
- cng_swb_pltype_ = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
- } else if (ACMCodecDB::database_[i].plfreq == 48000) {
- cng_fb_pltype_ = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
- }
- }
- }
-
- if (InitializeReceiverSafe() < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot initialize receiver");
- }
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id, "Created");
-}
-
-AudioCodingModuleImpl::~AudioCodingModuleImpl() {
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- current_send_codec_idx_ = -1;
-
- for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
- if (codecs_[i] != NULL) {
- // True stereo codecs share the same memory for master and
- // slave, so slave codec need to be nullified here, since the
- // memory will be deleted.
- if (slave_codecs_[i] == codecs_[i]) {
- slave_codecs_[i] = NULL;
- }
-
- // Mirror index holds the address of the codec memory.
- assert(mirror_codec_idx_[i] > -1);
- if (codecs_[mirror_codec_idx_[i]] != NULL) {
- delete codecs_[mirror_codec_idx_[i]];
- codecs_[mirror_codec_idx_[i]] = NULL;
- }
-
- codecs_[i] = NULL;
- }
-
- if (slave_codecs_[i] != NULL) {
- // Delete memory for stereo usage of mono codecs.
- assert(mirror_codec_idx_[i] > -1);
- if (slave_codecs_[mirror_codec_idx_[i]] != NULL) {
- delete slave_codecs_[mirror_codec_idx_[i]];
- slave_codecs_[mirror_codec_idx_[i]] = NULL;
- }
- slave_codecs_[i] = NULL;
- }
- }
-
- if (dtmf_detector_ != NULL) {
- delete dtmf_detector_;
- dtmf_detector_ = NULL;
- }
- if (dummy_rtp_header_ != NULL) {
- delete dummy_rtp_header_;
- dummy_rtp_header_ = NULL;
- }
- if (red_buffer_ != NULL) {
- delete[] red_buffer_;
- red_buffer_ = NULL;
- }
- }
-
- delete callback_crit_sect_;
- callback_crit_sect_ = NULL;
-
- delete acm_crit_sect_;
- acm_crit_sect_ = NULL;
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id_,
- "Destroyed");
-}
-
-int32_t AudioCodingModuleImpl::ChangeUniqueId(const int32_t id) {
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- id_ = id;
-
- for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
- if (codecs_[i] != NULL) {
- codecs_[i]->SetUniqueID(id);
- }
- }
- }
-
- neteq_.set_id(id_);
- return 0;
-}
-
-// Returns the number of milliseconds until the module want a
-// worker thread to call Process.
-int32_t AudioCodingModuleImpl::TimeUntilNextProcess() {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("TimeUntilNextProcess")) {
- return -1;
- }
- return codecs_[current_send_codec_idx_]->SamplesLeftToEncode() /
- (send_codec_inst_.plfreq / 1000);
-}
-
-int32_t AudioCodingModuleImpl::Process() {
- bool dual_stream;
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- dual_stream = (secondary_encoder_.get() != NULL);
- }
- if (dual_stream) {
- return ProcessDualStream();
- }
- return ProcessSingleStream();
-}
-
-int AudioCodingModuleImpl::EncodeFragmentation(int fragmentation_index,
- int payload_type,
- uint32_t current_timestamp,
- ACMGenericCodec* encoder,
- uint8_t* stream) {
- int16_t len_bytes = MAX_PAYLOAD_SIZE_BYTE;
- uint32_t rtp_timestamp;
- WebRtcACMEncodingType encoding_type;
- if (encoder->Encode(stream, &len_bytes, &rtp_timestamp, &encoding_type) < 0) {
- return -1;
- }
- assert(encoding_type == kActiveNormalEncoded);
- assert(len_bytes > 0);
-
- fragmentation_.fragmentationLength[fragmentation_index] = len_bytes;
- fragmentation_.fragmentationPlType[fragmentation_index] = payload_type;
- fragmentation_.fragmentationTimeDiff[fragmentation_index] =
- static_cast<uint16_t>(current_timestamp - rtp_timestamp);
- fragmentation_.fragmentationVectorSize++;
- return len_bytes;
-}
-
-// Primary payloads are sent immediately, whereas a single secondary payload is
-// buffered to be combined with "the next payload."
-// Normally "the next payload" would be a primary payload. In case two
-// consecutive secondary payloads are generated with no primary payload in
-// between, then two secondary payloads are packed in one RED.
-int AudioCodingModuleImpl::ProcessDualStream() {
- uint8_t stream[kMaxNumFragmentationVectors * MAX_PAYLOAD_SIZE_BYTE];
- uint32_t current_timestamp;
- int16_t length_bytes = 0;
- RTPFragmentationHeader my_fragmentation;
-
- uint8_t my_red_payload_type;
-
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- // Check if there is an encoder before.
- if (!HaveValidEncoder("ProcessDualStream") ||
- secondary_encoder_.get() == NULL) {
- return -1;
- }
- ACMGenericCodec* primary_encoder = codecs_[current_send_codec_idx_];
- // If primary encoder has a full frame of audio to generate payload.
- bool primary_ready_to_encode = primary_encoder->HasFrameToEncode();
- // If the secondary encoder has a frame of audio to generate a payload.
- bool secondary_ready_to_encode = secondary_encoder_->HasFrameToEncode();
-
- if (!primary_ready_to_encode && !secondary_ready_to_encode) {
- // Nothing to send.
- return 0;
- }
- int len_bytes_previous_secondary = static_cast<int>(
- fragmentation_.fragmentationLength[2]);
- assert(len_bytes_previous_secondary <= MAX_PAYLOAD_SIZE_BYTE);
- bool has_previous_payload = len_bytes_previous_secondary > 0;
-
- uint32_t primary_timestamp = primary_encoder->EarliestTimestamp();
- uint32_t secondary_timestamp = secondary_encoder_->EarliestTimestamp();
-
- if (!has_previous_payload && !primary_ready_to_encode &&
- secondary_ready_to_encode) {
- // Secondary payload will be the ONLY bit-stream. Encode by secondary
- // encoder, store the payload, and return. No packet is sent.
- int16_t len_bytes = MAX_PAYLOAD_SIZE_BYTE;
- WebRtcACMEncodingType encoding_type;
- if (secondary_encoder_->Encode(red_buffer_, &len_bytes,
- &last_fec_timestamp_,
- &encoding_type) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ProcessDual(): Encoding of secondary encoder Failed");
- return -1;
- }
- assert(len_bytes > 0);
- assert(encoding_type == kActiveNormalEncoded);
- assert(len_bytes <= MAX_PAYLOAD_SIZE_BYTE);
- fragmentation_.fragmentationLength[2] = len_bytes;
- return 0;
- }
-
- // Initialize with invalid but different values, so later can have sanity
- // check if they are different.
- int index_primary = -1;
- int index_secondary = -2;
- int index_previous_secondary = -3;
-
- if (primary_ready_to_encode) {
- index_primary = secondary_ready_to_encode ?
- TimestampLessThan(primary_timestamp, secondary_timestamp) : 0;
- index_primary += has_previous_payload ?
- TimestampLessThan(primary_timestamp, last_fec_timestamp_) : 0;
- }
-
- if (secondary_ready_to_encode) {
- // Timestamp of secondary payload can only be less than primary payload,
- // but is always larger than the timestamp of previous secondary payload.
- index_secondary = primary_ready_to_encode ?
- (1 - TimestampLessThan(primary_timestamp, secondary_timestamp)) : 0;
- }
-
- if (has_previous_payload) {
- index_previous_secondary = primary_ready_to_encode ?
- (1 - TimestampLessThan(primary_timestamp, last_fec_timestamp_)) : 0;
- // If secondary is ready it always have a timestamp larger than previous
- // secondary. So the index is either 0 or 1.
- index_previous_secondary += secondary_ready_to_encode ? 1 : 0;
- }
-
- // Indices must not be equal.
- assert(index_primary != index_secondary);
- assert(index_primary != index_previous_secondary);
- assert(index_secondary != index_previous_secondary);
-
- // One of the payloads has to be at position zero.
- assert(index_primary == 0 || index_secondary == 0 ||
- index_previous_secondary == 0);
-
- // Timestamp of the RED payload.
- if (index_primary == 0) {
- current_timestamp = primary_timestamp;
- } else if (index_secondary == 0) {
- current_timestamp = secondary_timestamp;
- } else {
- current_timestamp = last_fec_timestamp_;
- }
-
- fragmentation_.fragmentationVectorSize = 0;
- if (has_previous_payload) {
- assert(index_previous_secondary >= 0 &&
- index_previous_secondary < kMaxNumFragmentationVectors);
- assert(len_bytes_previous_secondary <= MAX_PAYLOAD_SIZE_BYTE);
- memcpy(&stream[index_previous_secondary * MAX_PAYLOAD_SIZE_BYTE],
- red_buffer_, sizeof(stream[0]) * len_bytes_previous_secondary);
- fragmentation_.fragmentationLength[index_previous_secondary] =
- len_bytes_previous_secondary;
- fragmentation_.fragmentationPlType[index_previous_secondary] =
- secondary_send_codec_inst_.pltype;
- fragmentation_.fragmentationTimeDiff[index_previous_secondary] =
- static_cast<uint16_t>(current_timestamp - last_fec_timestamp_);
- fragmentation_.fragmentationVectorSize++;
- }
-
- if (primary_ready_to_encode) {
- assert(index_primary >= 0 && index_primary < kMaxNumFragmentationVectors);
- int i = index_primary * MAX_PAYLOAD_SIZE_BYTE;
- if (EncodeFragmentation(index_primary, send_codec_inst_.pltype,
- current_timestamp, primary_encoder,
- &stream[i]) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ProcessDualStream(): Encoding of primary encoder Failed");
- return -1;
- }
- }
-
- if (secondary_ready_to_encode) {
- assert(index_secondary >= 0 &&
- index_secondary < kMaxNumFragmentationVectors - 1);
- int i = index_secondary * MAX_PAYLOAD_SIZE_BYTE;
- if (EncodeFragmentation(index_secondary,
- secondary_send_codec_inst_.pltype,
- current_timestamp, secondary_encoder_.get(),
- &stream[i]) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ProcessDualStream(): Encoding of secondary encoder "
- "Failed");
- return -1;
- }
- }
- // Copy to local variable, as it will be used outside the ACM lock.
- my_fragmentation.CopyFrom(fragmentation_);
- my_red_payload_type = red_pltype_;
- length_bytes = 0;
- for (int n = 0; n < fragmentation_.fragmentationVectorSize; n++) {
- length_bytes += fragmentation_.fragmentationLength[n];
- }
- }
-
- {
- CriticalSectionScoped lock(callback_crit_sect_);
- if (packetization_callback_ != NULL) {
- // Callback with payload data, including redundant data (FEC/RED).
- if (packetization_callback_->SendData(kAudioFrameSpeech,
- my_red_payload_type,
- current_timestamp, stream,
- length_bytes,
- &my_fragmentation) < 0) {
- return -1;
- }
- }
- }
-
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- // Now that data is sent, clean up fragmentation.
- ResetFragmentation(0);
- }
- return 0;
-}
-
-// Process any pending tasks such as timeouts.
-int AudioCodingModuleImpl::ProcessSingleStream() {
- // Make room for 1 RED payload.
- uint8_t stream[2 * MAX_PAYLOAD_SIZE_BYTE];
- int16_t length_bytes = 2 * MAX_PAYLOAD_SIZE_BYTE;
- int16_t red_length_bytes = length_bytes;
- uint32_t rtp_timestamp;
- int16_t status;
- WebRtcACMEncodingType encoding_type;
- FrameType frame_type = kAudioFrameSpeech;
- uint8_t current_payload_type = 0;
- bool has_data_to_send = false;
- bool fec_active = false;
- RTPFragmentationHeader my_fragmentation;
-
- // Keep the scope of the ACM critical section limited.
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- // Check if there is an encoder before.
- if (!HaveValidEncoder("ProcessSingleStream")) {
- return -1;
- }
- status = codecs_[current_send_codec_idx_]->Encode(stream, &length_bytes,
- &rtp_timestamp,
- &encoding_type);
- if (status < 0) {
- // Encode failed.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ProcessSingleStream(): Encoding Failed");
- length_bytes = 0;
- return -1;
- } else if (status == 0) {
- // Not enough data.
- return 0;
- } else {
- switch (encoding_type) {
- case kNoEncoding: {
- current_payload_type = previous_pltype_;
- frame_type = kFrameEmpty;
- length_bytes = 0;
- break;
- }
- case kActiveNormalEncoded:
- case kPassiveNormalEncoded: {
- current_payload_type = static_cast<uint8_t>(send_codec_inst_.pltype);
- frame_type = kAudioFrameSpeech;
- break;
- }
- case kPassiveDTXNB: {
- current_payload_type = cng_nb_pltype_;
- frame_type = kAudioFrameCN;
- is_first_red_ = true;
- break;
- }
- case kPassiveDTXWB: {
- current_payload_type = cng_wb_pltype_;
- frame_type = kAudioFrameCN;
- is_first_red_ = true;
- break;
- }
- case kPassiveDTXSWB: {
- current_payload_type = cng_swb_pltype_;
- frame_type = kAudioFrameCN;
- is_first_red_ = true;
- break;
- }
- case kPassiveDTXFB: {
- current_payload_type = cng_fb_pltype_;
- frame_type = kAudioFrameCN;
- is_first_red_ = true;
- break;
- }
- }
- has_data_to_send = true;
- previous_pltype_ = current_payload_type;
-
- // Redundancy encode is done here. The two bitstreams packetized into
- // one RTP packet and the fragmentation points are set.
- // Only apply RED on speech data.
- if ((fec_enabled_) &&
- ((encoding_type == kActiveNormalEncoded) ||
- (encoding_type == kPassiveNormalEncoded))) {
- // FEC is enabled within this scope.
- //
- // Note that, a special solution exists for iSAC since it is the only
- // codec for which GetRedPayload has a non-empty implementation.
- //
- // Summary of the FEC scheme below (use iSAC as example):
- //
- // 1st (is_first_red_ is true) encoded iSAC frame (primary #1) =>
- // - call GetRedPayload() and store redundancy for packet #1 in
- // second fragment of RED buffer (old data)
- // - drop the primary iSAC frame
- // - don't call SendData
- // 2nd (is_first_red_ is false) encoded iSAC frame (primary #2) =>
- // - store primary #2 in 1st fragment of RED buffer and send the
- // combined packet
- // - the transmitted packet contains primary #2 (new) and
- // reduncancy for packet #1 (old)
- // - call GetRed_Payload() and store redundancy for packet #2 in
- // second fragment of RED buffer
- //
- // ...
- //
- // Nth encoded iSAC frame (primary #N) =>
- // - store primary #N in 1st fragment of RED buffer and send the
- // combined packet
- // - the transmitted packet contains primary #N (new) and
- // reduncancy for packet #(N-1) (old)
- // - call GetRedPayload() and store redundancy for packet #N in
- // second fragment of RED buffer
- //
- // For all other codecs, GetRedPayload does nothing and returns -1 =>
- // redundant data is only a copy.
- //
- // First combined packet contains : #2 (new) and #1 (old)
- // Second combined packet contains: #3 (new) and #2 (old)
- // Third combined packet contains : #4 (new) and #3 (old)
- //
- // Hence, even if every second packet is dropped, perfect
- // reconstruction is possible.
- fec_active = true;
-
- has_data_to_send = false;
- // Skip the following part for the first packet in a RED session.
- if (!is_first_red_) {
- // Rearrange stream such that FEC packets are included.
- // Replace stream now that we have stored current stream.
- memcpy(stream + fragmentation_.fragmentationOffset[1], red_buffer_,
- fragmentation_.fragmentationLength[1]);
- // Update the fragmentation time difference vector, in number of
- // timestamps.
- uint16_t time_since_last = static_cast<uint16_t>(rtp_timestamp -
- last_fec_timestamp_);
-
- // Update fragmentation vectors.
- fragmentation_.fragmentationPlType[1] =
- fragmentation_.fragmentationPlType[0];
- fragmentation_.fragmentationTimeDiff[1] = time_since_last;
- has_data_to_send = true;
- }
-
- // Insert new packet length.
- fragmentation_.fragmentationLength[0] = length_bytes;
-
- // Insert new packet payload type.
- fragmentation_.fragmentationPlType[0] = current_payload_type;
- last_fec_timestamp_ = rtp_timestamp;
-
- // Can be modified by the GetRedPayload() call if iSAC is utilized.
- red_length_bytes = length_bytes;
-
- // A fragmentation header is provided => packetization according to
- // RFC 2198 (RTP Payload for Redundant Audio Data) will be used.
- // First fragment is the current data (new).
- // Second fragment is the previous data (old).
- length_bytes = static_cast<int16_t>(
- fragmentation_.fragmentationLength[0] +
- fragmentation_.fragmentationLength[1]);
-
- // Get, and store, redundant data from the encoder based on the recently
- // encoded frame.
- // NOTE - only iSAC contains an implementation; all other codecs does
- // nothing and returns -1.
- if (codecs_[current_send_codec_idx_]->GetRedPayload(
- red_buffer_,
- &red_length_bytes) == -1) {
- // The codec was not iSAC => use current encoder output as redundant
- // data instead (trivial FEC scheme).
- memcpy(red_buffer_, stream, red_length_bytes);
- }
-
- is_first_red_ = false;
- // Update payload type with RED payload type.
- current_payload_type = red_pltype_;
- // We have packed 2 payloads.
- fragmentation_.fragmentationVectorSize = kNumFecFragmentationVectors;
-
- // Copy to local variable, as it will be used outside ACM lock.
- my_fragmentation.CopyFrom(fragmentation_);
- // Store RED length.
- fragmentation_.fragmentationLength[1] = red_length_bytes;
- }
- }
- }
-
- if (has_data_to_send) {
- CriticalSectionScoped lock(callback_crit_sect_);
-
- if (packetization_callback_ != NULL) {
- if (fec_active) {
- // Callback with payload data, including redundant data (FEC/RED).
- packetization_callback_->SendData(frame_type, current_payload_type,
- rtp_timestamp, stream,
- length_bytes,
- &my_fragmentation);
- } else {
- // Callback with payload data.
- packetization_callback_->SendData(frame_type, current_payload_type,
- rtp_timestamp, stream,
- length_bytes, NULL);
- }
- }
-
- if (vad_callback_ != NULL) {
- // Callback with VAD decision.
- vad_callback_->InFrameType(static_cast<int16_t>(encoding_type));
- }
- }
- return length_bytes;
-}
-
-/////////////////////////////////////////
-// Sender
-//
-
-// Initialize send codec.
-int32_t AudioCodingModuleImpl::InitializeSender() {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // Start with invalid values.
- send_codec_registered_ = false;
- current_send_codec_idx_ = -1;
- send_codec_inst_.plname[0] = '\0';
-
- // Delete all encoders to start fresh.
- for (int id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
- if (codecs_[id] != NULL) {
- codecs_[id]->DestructEncoder();
- }
- }
-
- // Initialize FEC/RED.
- is_first_red_ = true;
- if (fec_enabled_ || secondary_encoder_.get() != NULL) {
- if (red_buffer_ != NULL) {
- memset(red_buffer_, 0, MAX_PAYLOAD_SIZE_BYTE);
- }
- if (fec_enabled_) {
- ResetFragmentation(kNumFecFragmentationVectors);
- } else {
- ResetFragmentation(0);
- }
- }
-
- return 0;
-}
-
-int32_t AudioCodingModuleImpl::ResetEncoder() {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (!HaveValidEncoder("ResetEncoder")) {
- return -1;
- }
- return codecs_[current_send_codec_idx_]->ResetEncoder();
-}
-
-void AudioCodingModuleImpl::UnregisterSendCodec() {
- CriticalSectionScoped lock(acm_crit_sect_);
- send_codec_registered_ = false;
- current_send_codec_idx_ = -1;
- // If send Codec is unregistered then remove the secondary codec as well.
- if (secondary_encoder_.get() != NULL)
- secondary_encoder_.reset();
- return;
-}
-
-ACMGenericCodec* AudioCodingModuleImpl::CreateCodec(const CodecInst& codec) {
- ACMGenericCodec* my_codec = NULL;
-
- my_codec = ACMCodecDB::CreateCodecInstance(&codec);
- if (my_codec == NULL) {
- // Error, could not create the codec.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ACMCodecDB::CreateCodecInstance() failed in CreateCodec()");
- return my_codec;
- }
- my_codec->SetUniqueID(id_);
- my_codec->SetNetEqDecodeLock(neteq_.DecodeLock());
-
- return my_codec;
-}
-
-// Check if the given codec is a valid to be registered as send codec.
-static int IsValidSendCodec(const CodecInst& send_codec,
- bool is_primary_encoder,
- int acm_id,
- int* mirror_id) {
- if ((send_codec.channels != 1) && (send_codec.channels != 2)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "Wrong number of channels (%d, only mono and stereo are "
- "supported) for %s encoder", send_codec.channels,
- is_primary_encoder ? "primary" : "secondary");
- return -1;
- }
-
- int codec_id = ACMCodecDB::CodecNumber(&send_codec, mirror_id);
- if (codec_id < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "Invalid settings for the send codec.");
- return -1;
- }
-
- // TODO(tlegrand): Remove this check. Already taken care of in
- // ACMCodecDB::CodecNumber().
- // Check if the payload-type is valid
- if (!ACMCodecDB::ValidPayloadType(send_codec.pltype)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "Invalid payload-type %d for %s.", send_codec.pltype,
- send_codec.plname);
- return -1;
- }
-
- // Telephone-event cannot be a send codec.
- if (!STR_CASE_CMP(send_codec.plname, "telephone-event")) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "telephone-event cannot be a send codec");
- *mirror_id = -1;
- return -1;
- }
-
- if (ACMCodecDB::codec_settings_[codec_id].channel_support
- < send_codec.channels) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "%d number of channels not supportedn for %s.",
- send_codec.channels, send_codec.plname);
- *mirror_id = -1;
- return -1;
- }
-
- if (!is_primary_encoder) {
- // If registering the secondary encoder, then RED and CN are not valid
- // choices as encoder.
- if (IsCodecRED(&send_codec)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "RED cannot be secondary codec");
- *mirror_id = -1;
- return -1;
- }
-
- if (IsCodecCN(&send_codec)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, acm_id,
- "DTX cannot be secondary codec");
- *mirror_id = -1;
- return -1;
- }
- }
- return codec_id;
-}
-
-int AudioCodingModuleImpl::RegisterSecondarySendCodec(
- const CodecInst& send_codec) {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (!send_codec_registered_) {
- return -1;
- }
- // Primary and Secondary codecs should have the same sampling rates.
- if (send_codec.plfreq != send_codec_inst_.plfreq) {
- return -1;
- }
- int mirror_id;
- int codec_id = IsValidSendCodec(send_codec, false, id_, &mirror_id);
- if (codec_id < 0) {
- return -1;
- }
- ACMGenericCodec* encoder = CreateCodec(send_codec);
- WebRtcACMCodecParams codec_params;
- // Initialize the codec before registering. For secondary codec VAD & DTX are
- // disabled.
- memcpy(&(codec_params.codec_inst), &send_codec, sizeof(CodecInst));
- codec_params.enable_vad = false;
- codec_params.enable_dtx = false;
- codec_params.vad_mode = VADNormal;
- // Force initialization.
- if (encoder->InitEncoder(&codec_params, true) < 0) {
- // Could not initialize, therefore cannot be registered.
- delete encoder;
- return -1;
- }
- secondary_encoder_.reset(encoder);
- memcpy(&secondary_send_codec_inst_, &send_codec, sizeof(send_codec));
-
- // Disable VAD & DTX.
- SetVADSafe(false, false, VADNormal);
-
- // Cleaning.
- if (red_buffer_) {
- memset(red_buffer_, 0, MAX_PAYLOAD_SIZE_BYTE);
- }
- ResetFragmentation(0);
- return 0;
-}
-
-void AudioCodingModuleImpl::UnregisterSecondarySendCodec() {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (secondary_encoder_.get() == NULL) {
- return;
- }
- secondary_encoder_.reset();
- ResetFragmentation(0);
-}
-
-int AudioCodingModuleImpl::SecondarySendCodec(
- CodecInst* secondary_codec) const {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (secondary_encoder_.get() == NULL) {
- return -1;
- }
- memcpy(secondary_codec, &secondary_send_codec_inst_,
- sizeof(secondary_send_codec_inst_));
- return 0;
-}
-
-// Can be called multiple times for Codec, CNG, RED.
-int32_t AudioCodingModuleImpl::RegisterSendCodec(
- const CodecInst& send_codec) {
- int mirror_id;
- int codec_id = IsValidSendCodec(send_codec, true, id_, &mirror_id);
-
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // Check for reported errors from function IsValidSendCodec().
- if (codec_id < 0) {
- if (!send_codec_registered_) {
- // This values has to be NULL if there is no codec registered.
- current_send_codec_idx_ = -1;
- }
- return -1;
- }
-
- // RED can be registered with other payload type. If not registered a default
- // payload type is used.
- if (IsCodecRED(&send_codec)) {
- // TODO(tlegrand): Remove this check. Already taken care of in
- // ACMCodecDB::CodecNumber().
- // Check if the payload-type is valid
- if (!ACMCodecDB::ValidPayloadType(send_codec.pltype)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Invalid payload-type %d for %s.", send_codec.pltype,
- send_codec.plname);
- return -1;
- }
- // Set RED payload type.
- red_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- return 0;
- }
-
- // CNG can be registered with other payload type. If not registered the
- // default payload types from codec database will be used.
- if (IsCodecCN(&send_codec)) {
- // CNG is registered.
- switch (send_codec.plfreq) {
- case 8000: {
- cng_nb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- break;
- }
- case 16000: {
- cng_wb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- break;
- }
- case 32000: {
- cng_swb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- break;
- }
- case 48000: {
- cng_fb_pltype_ = static_cast<uint8_t>(send_codec.pltype);
- break;
- }
- default: {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RegisterSendCodec() failed, invalid frequency for CNG "
- "registration");
- return -1;
- }
- }
- return 0;
- }
-
- // Set Stereo, and make sure VAD and DTX is turned off.
- if (send_codec.channels == 2) {
- stereo_send_ = true;
- if (vad_enabled_ || dtx_enabled_) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- "VAD/DTX is turned off, not supported when sending stereo.");
- }
- vad_enabled_ = false;
- dtx_enabled_ = false;
- } else {
- stereo_send_ = false;
- }
-
- // Check if the codec is already registered as send codec.
- bool is_send_codec;
- if (send_codec_registered_) {
- int send_codec_mirror_id;
- int send_codec_id = ACMCodecDB::CodecNumber(&send_codec_inst_,
- &send_codec_mirror_id);
- assert(send_codec_id >= 0);
- is_send_codec = (send_codec_id == codec_id) ||
- (mirror_id == send_codec_mirror_id);
- } else {
- is_send_codec = false;
- }
-
- // If there is secondary codec registered and the new send codec has a
- // sampling rate different than that of secondary codec, then unregister the
- // secondary codec.
- if (secondary_encoder_.get() != NULL &&
- secondary_send_codec_inst_.plfreq != send_codec.plfreq) {
- secondary_encoder_.reset();
- ResetFragmentation(0);
- }
-
- // If new codec, or new settings, register.
- if (!is_send_codec) {
- if (codecs_[mirror_id] == NULL) {
- codecs_[mirror_id] = CreateCodec(send_codec);
- if (codecs_[mirror_id] == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Create the codec");
- return -1;
- }
- mirror_codec_idx_[mirror_id] = mirror_id;
- }
-
- if (mirror_id != codec_id) {
- codecs_[codec_id] = codecs_[mirror_id];
- mirror_codec_idx_[codec_id] = mirror_id;
- }
-
- ACMGenericCodec* codec_ptr = codecs_[codec_id];
- WebRtcACMCodecParams codec_params;
-
- memcpy(&(codec_params.codec_inst), &send_codec, sizeof(CodecInst));
- codec_params.enable_vad = vad_enabled_;
- codec_params.enable_dtx = dtx_enabled_;
- codec_params.vad_mode = vad_mode_;
- // Force initialization.
- if (codec_ptr->InitEncoder(&codec_params, true) < 0) {
- // Could not initialize the encoder.
-
- // Check if already have a registered codec.
- // Depending on that different messages are logged.
- if (!send_codec_registered_) {
- current_send_codec_idx_ = -1;
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Initialize the encoder No Encoder is registered");
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Initialize the encoder, continue encoding with "
- "the previously registered codec");
- }
- return -1;
- }
-
- // Update states.
- dtx_enabled_ = codec_params.enable_dtx;
- vad_enabled_ = codec_params.enable_vad;
- vad_mode_ = codec_params.vad_mode;
-
- // Everything is fine so we can replace the previous codec with this one.
- if (send_codec_registered_) {
- // If we change codec we start fresh with FEC.
- // This is not strictly required by the standard.
- is_first_red_ = true;
-
- codec_ptr->SetVAD(&dtx_enabled_, &vad_enabled_, &vad_mode_);
- }
-
- current_send_codec_idx_ = codec_id;
- send_codec_registered_ = true;
- memcpy(&send_codec_inst_, &send_codec, sizeof(CodecInst));
- previous_pltype_ = send_codec_inst_.pltype;
- return 0;
- } else {
- // If codec is the same as already registered check if any parameters
- // has changed compared to the current values.
- // If any parameter is valid then apply it and record.
- bool force_init = false;
-
- if (mirror_id != codec_id) {
- codecs_[codec_id] = codecs_[mirror_id];
- mirror_codec_idx_[codec_id] = mirror_id;
- }
-
- // Check the payload type.
- if (send_codec.pltype != send_codec_inst_.pltype) {
- // At this point check if the given payload type is valid.
- // Record it later when the sampling frequency is changed
- // successfully.
- if (!ACMCodecDB::ValidPayloadType(send_codec.pltype)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Out of range payload type");
- return -1;
- }
- }
-
- // If there is a codec that ONE instance of codec supports multiple
- // sampling frequencies, then we need to take care of it here.
- // one such a codec is iSAC. Both WB and SWB are encoded and decoded
- // with one iSAC instance. Therefore, we need to update the encoder
- // frequency if required.
- if (send_codec_inst_.plfreq != send_codec.plfreq) {
- force_init = true;
-
- // If sampling frequency is changed we have to start fresh with RED.
- is_first_red_ = true;
- }
-
- // If packet size or number of channels has changed, we need to
- // re-initialize the encoder.
- if (send_codec_inst_.pacsize != send_codec.pacsize) {
- force_init = true;
- }
- if (send_codec_inst_.channels != send_codec.channels) {
- force_init = true;
- }
-
- if (force_init) {
- WebRtcACMCodecParams codec_params;
-
- memcpy(&(codec_params.codec_inst), &send_codec, sizeof(CodecInst));
- codec_params.enable_vad = vad_enabled_;
- codec_params.enable_dtx = dtx_enabled_;
- codec_params.vad_mode = vad_mode_;
-
- // Force initialization.
- if (codecs_[current_send_codec_idx_]->InitEncoder(&codec_params,
- true) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Could not change the codec packet-size.");
- return -1;
- }
-
- send_codec_inst_.plfreq = send_codec.plfreq;
- send_codec_inst_.pacsize = send_codec.pacsize;
- send_codec_inst_.channels = send_codec.channels;
- }
-
- // If the change of sampling frequency has been successful then
- // we store the payload-type.
- send_codec_inst_.pltype = send_codec.pltype;
-
- // Check if a change in Rate is required.
- if (send_codec.rate != send_codec_inst_.rate) {
- if (codecs_[codec_id]->SetBitRate(send_codec.rate) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Could not change the codec rate.");
- return -1;
- }
- send_codec_inst_.rate = send_codec.rate;
- }
- previous_pltype_ = send_codec_inst_.pltype;
-
- return 0;
- }
-}
-
-// Get current send codec.
-int32_t AudioCodingModuleImpl::SendCodec(
- CodecInst* current_codec) const {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendCodec()");
- CriticalSectionScoped lock(acm_crit_sect_);
-
- assert(current_codec);
- if (!send_codec_registered_) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendCodec Failed, no codec is registered");
-
- return -1;
- }
- WebRtcACMCodecParams encoder_param;
- codecs_[current_send_codec_idx_]->EncoderParams(&encoder_param);
- encoder_param.codec_inst.pltype = send_codec_inst_.pltype;
- memcpy(current_codec, &(encoder_param.codec_inst), sizeof(CodecInst));
-
- return 0;
-}
-
-// Get current send frequency.
-int32_t AudioCodingModuleImpl::SendFrequency() const {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendFrequency()");
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!send_codec_registered_) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendFrequency Failed, no codec is registered");
-
- return -1;
- }
-
- return send_codec_inst_.plfreq;
-}
-
-// Get encode bitrate.
-// Adaptive rate codecs return their current encode target rate, while other
-// codecs return there longterm avarage or their fixed rate.
-int32_t AudioCodingModuleImpl::SendBitrate() const {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!send_codec_registered_) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "SendBitrate Failed, no codec is registered");
-
- return -1;
- }
-
- WebRtcACMCodecParams encoder_param;
- codecs_[current_send_codec_idx_]->EncoderParams(&encoder_param);
-
- return encoder_param.codec_inst.rate;
-}
-
-// Set available bandwidth, inform the encoder about the estimated bandwidth
-// received from the remote party.
-int32_t AudioCodingModuleImpl::SetReceivedEstimatedBandwidth(
- const int32_t bw) {
- return codecs_[current_send_codec_idx_]->SetEstimatedBandwidth(bw);
-}
-
-// Register a transport callback which will be called to deliver
-// the encoded buffers.
-int32_t AudioCodingModuleImpl::RegisterTransportCallback(
- AudioPacketizationCallback* transport) {
- CriticalSectionScoped lock(callback_crit_sect_);
- packetization_callback_ = transport;
- return 0;
-}
-
-// Add 10MS of raw (PCM) audio data to the encoder.
-int32_t AudioCodingModuleImpl::Add10MsData(
- const AudioFrame& audio_frame) {
- if (audio_frame.samples_per_channel_ <= 0) {
- assert(false);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, payload length is negative or "
- "zero");
- return -1;
- }
-
- // Allow for 8, 16, 32 and 48kHz input audio.
- if ((audio_frame.sample_rate_hz_ != 8000)
- && (audio_frame.sample_rate_hz_ != 16000)
- && (audio_frame.sample_rate_hz_ != 32000)
- && (audio_frame.sample_rate_hz_ != 48000)) {
- assert(false);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, input frequency not valid");
- return -1;
- }
-
- // If the length and frequency matches. We currently just support raw PCM.
- if ((audio_frame.sample_rate_hz_ / 100)
- != audio_frame.samples_per_channel_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, input frequency and length doesn't"
- " match");
- return -1;
- }
-
- if (audio_frame.num_channels_ != 1 && audio_frame.num_channels_ != 2) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot Add 10 ms audio, invalid number of channels.");
- return -1;
- }
-
- CriticalSectionScoped lock(acm_crit_sect_);
- // Do we have a codec registered?
- if (!HaveValidEncoder("Add10MsData")) {
- return -1;
- }
-
- const AudioFrame* ptr_frame;
- // Perform a resampling, also down-mix if it is required and can be
- // performed before resampling (a down mix prior to resampling will take
- // place if both primary and secondary encoders are mono and input is in
- // stereo).
- if (PreprocessToAddData(audio_frame, &ptr_frame) < 0) {
- return -1;
- }
- TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Audio", ptr_frame->timestamp_,
- "now", clock_->TimeInMilliseconds());
-
- // Check whether we need an up-mix or down-mix?
- bool remix = ptr_frame->num_channels_ != send_codec_inst_.channels;
- if (secondary_encoder_.get() != NULL) {
- remix = remix ||
- (ptr_frame->num_channels_ != secondary_send_codec_inst_.channels);
- }
-
- // If a re-mix is required (up or down), this buffer will store re-mixed
- // version of the input.
- int16_t buffer[WEBRTC_10MS_PCM_AUDIO];
- if (remix) {
- if (ptr_frame->num_channels_ == 1) {
- if (UpMix(*ptr_frame, WEBRTC_10MS_PCM_AUDIO, buffer) < 0)
- return -1;
- } else {
- if (DownMix(*ptr_frame, WEBRTC_10MS_PCM_AUDIO, buffer) < 0)
- return -1;
- }
- }
-
- // When adding data to encoders this pointer is pointing to an audio buffer
- // with correct number of channels.
- const int16_t* ptr_audio = ptr_frame->data_;
-
- // For pushing data to primary, point the |ptr_audio| to correct buffer.
- if (send_codec_inst_.channels != ptr_frame->num_channels_)
- ptr_audio = buffer;
-
- if (codecs_[current_send_codec_idx_]->Add10MsData(
- ptr_frame->timestamp_, ptr_audio, ptr_frame->samples_per_channel_,
- send_codec_inst_.channels) < 0)
- return -1;
-
- if (secondary_encoder_.get() != NULL) {
- // For pushing data to secondary, point the |ptr_audio| to correct buffer.
- ptr_audio = ptr_frame->data_;
- if (secondary_send_codec_inst_.channels != ptr_frame->num_channels_)
- ptr_audio = buffer;
-
- if (secondary_encoder_->Add10MsData(
- ptr_frame->timestamp_, ptr_audio, ptr_frame->samples_per_channel_,
- secondary_send_codec_inst_.channels) < 0)
- return -1;
- }
-
- return 0;
-}
-
-// Perform a resampling and down-mix if required. We down-mix only if
-// encoder is mono and input is stereo. In case of dual-streaming, both
-// encoders has to be mono for down-mix to take place.
-// |*ptr_out| will point to the pre-processed audio-frame. If no pre-processing
-// is required, |*ptr_out| points to |in_frame|.
-int AudioCodingModuleImpl::PreprocessToAddData(const AudioFrame& in_frame,
- const AudioFrame** ptr_out) {
- // Primary and secondary (if exists) should have the same sampling rate.
- assert((secondary_encoder_.get() != NULL) ?
- secondary_send_codec_inst_.plfreq == send_codec_inst_.plfreq : true);
-
- bool resample = static_cast<int32_t>(in_frame.sample_rate_hz_) !=
- send_codec_inst_.plfreq;
-
- // This variable is true if primary codec and secondary codec (if exists)
- // are both mono and input is stereo.
- bool down_mix;
- if (secondary_encoder_.get() != NULL) {
- down_mix = (in_frame.num_channels_ == 2) &&
- (send_codec_inst_.channels == 1) &&
- (secondary_send_codec_inst_.channels == 1);
- } else {
- down_mix = (in_frame.num_channels_ == 2) &&
- (send_codec_inst_.channels == 1);
- }
-
- if (!down_mix && !resample) {
- // No pre-processing is required.
- last_in_timestamp_ = in_frame.timestamp_;
- last_timestamp_ = in_frame.timestamp_;
- *ptr_out = &in_frame;
- return 0;
- }
-
- *ptr_out = &preprocess_frame_;
- preprocess_frame_.num_channels_ = in_frame.num_channels_;
- int16_t audio[WEBRTC_10MS_PCM_AUDIO];
- const int16_t* src_ptr_audio = in_frame.data_;
- int16_t* dest_ptr_audio = preprocess_frame_.data_;
- if (down_mix) {
- // If a resampling is required the output of a down-mix is written into a
- // local buffer, otherwise, it will be written to the output frame.
- if (resample)
- dest_ptr_audio = audio;
- if (DownMix(in_frame, WEBRTC_10MS_PCM_AUDIO, dest_ptr_audio) < 0)
- return -1;
- preprocess_frame_.num_channels_ = 1;
- // Set the input of the resampler is the down-mixed signal.
- src_ptr_audio = audio;
- }
-
- preprocess_frame_.timestamp_ = in_frame.timestamp_;
- preprocess_frame_.samples_per_channel_ = in_frame.samples_per_channel_;
- preprocess_frame_.sample_rate_hz_ = in_frame.sample_rate_hz_;
- // If it is required, we have to do a resampling.
- if (resample) {
- // The result of the resampler is written to output frame.
- dest_ptr_audio = preprocess_frame_.data_;
-
- uint32_t timestamp_diff;
-
- // Calculate the timestamp of this frame.
- if (last_in_timestamp_ > in_frame.timestamp_) {
- // A wrap around has happened.
- timestamp_diff = (static_cast<uint32_t>(0xFFFFFFFF) - last_in_timestamp_)
- + in_frame.timestamp_;
- } else {
- timestamp_diff = in_frame.timestamp_ - last_in_timestamp_;
- }
- preprocess_frame_.timestamp_ = last_timestamp_ +
- static_cast<uint32_t>(timestamp_diff *
- (static_cast<double>(send_codec_inst_.plfreq) /
- static_cast<double>(in_frame.sample_rate_hz_)));
-
- preprocess_frame_.samples_per_channel_ = input_resampler_.Resample10Msec(
- src_ptr_audio, in_frame.sample_rate_hz_, dest_ptr_audio,
- send_codec_inst_.plfreq, preprocess_frame_.num_channels_);
-
- if (preprocess_frame_.samples_per_channel_ < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot add 10 ms audio, resmapling failed");
- return -1;
- }
- preprocess_frame_.sample_rate_hz_ = send_codec_inst_.plfreq;
- }
- last_in_timestamp_ = in_frame.timestamp_;
- last_timestamp_ = preprocess_frame_.timestamp_;
-
- return 0;
-}
-
-/////////////////////////////////////////
-// (FEC) Forward Error Correction
-//
-
-bool AudioCodingModuleImpl::FECStatus() const {
- CriticalSectionScoped lock(acm_crit_sect_);
- return fec_enabled_;
-}
-
-// Configure FEC status i.e on/off.
-int32_t
-AudioCodingModuleImpl::SetFECStatus(
-#ifdef WEBRTC_CODEC_RED
- const bool enable_fec) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (fec_enabled_ != enable_fec) {
- // Reset the RED buffer.
- memset(red_buffer_, 0, MAX_PAYLOAD_SIZE_BYTE);
-
- // Reset fragmentation buffers.
- ResetFragmentation(kNumFecFragmentationVectors);
- // Set fec_enabled_.
- fec_enabled_ = enable_fec;
- }
- is_first_red_ = true; // Make sure we restart FEC.
- return 0;
-#else
- const bool /* enable_fec */) {
- fec_enabled_ = false;
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- " WEBRTC_CODEC_RED is undefined => fec_enabled_ = %d",
- fec_enabled_);
- return -1;
-#endif
-}
-
-/////////////////////////////////////////
-// (VAD) Voice Activity Detection
-//
-int32_t AudioCodingModuleImpl::SetVAD(bool enable_dtx, bool enable_vad,
- ACMVADMode mode) {
- CriticalSectionScoped lock(acm_crit_sect_);
- return SetVADSafe(enable_dtx, enable_vad, mode);
-}
-
-int AudioCodingModuleImpl::SetVADSafe(bool enable_dtx, bool enable_vad,
- ACMVADMode mode) {
- // Sanity check of the mode.
- if ((mode != VADNormal) && (mode != VADLowBitrate)
- && (mode != VADAggr) && (mode != VADVeryAggr)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Invalid VAD Mode %d, no change is made to VAD/DTX status",
- static_cast<int>(mode));
- return -1;
- }
-
- // Check that the send codec is mono. We don't support VAD/DTX for stereo
- // sending.
- if ((enable_dtx || enable_vad) && stereo_send_) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "VAD/DTX not supported for stereo sending.");
- dtx_enabled_ = false;
- vad_enabled_ = false;
- vad_mode_ = mode;
- return -1;
- }
-
- // We don't support VAD/DTX when dual-streaming is enabled, i.e.
- // secondary-encoder is registered.
- if ((enable_dtx || enable_vad) && secondary_encoder_.get() != NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "VAD/DTX not supported when dual-streaming is enabled.");
- dtx_enabled_ = false;
- vad_enabled_ = false;
- vad_mode_ = mode;
- return -1;
- }
-
- // Store VAD/DTX settings. Values can be changed in the call to "SetVAD"
- // below.
- dtx_enabled_ = enable_dtx;
- vad_enabled_ = enable_vad;
- vad_mode_ = mode;
-
- // If a send codec is registered, set VAD/DTX for the codec.
- if (HaveValidEncoder("SetVAD")) {
- if (codecs_[current_send_codec_idx_]->SetVAD(&dtx_enabled_, &vad_enabled_,
- &vad_mode_) < 0) {
- // SetVAD failed.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "SetVAD failed");
- dtx_enabled_ = false;
- vad_enabled_ = false;
- return -1;
- }
- }
-
- return 0;
-}
-
-// Get VAD/DTX settings.
-// TODO(tlegrand): Change this method to void.
-int32_t AudioCodingModuleImpl::VAD(bool* dtx_enabled, bool* vad_enabled,
- ACMVADMode* mode) const {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- *dtx_enabled = dtx_enabled_;
- *vad_enabled = vad_enabled_;
- *mode = vad_mode_;
-
- return 0;
-}
-
-/////////////////////////////////////////
-// Receiver
-//
-
-int32_t AudioCodingModuleImpl::InitializeReceiver() {
- CriticalSectionScoped lock(acm_crit_sect_);
- return InitializeReceiverSafe();
-}
-
-// Initialize receiver, resets codec database etc.
-int32_t AudioCodingModuleImpl::InitializeReceiverSafe() {
- initial_delay_ms_ = 0;
- num_packets_accumulated_ = 0;
- num_bytes_accumulated_ = 0;
- accumulated_audio_ms_ = 0;
- first_payload_received_ = 0;
- last_incoming_send_timestamp_ = 0;
- track_neteq_buffer_ = false;
- playout_ts_ = 0;
- // If the receiver is already initialized then we want to destroy any
- // existing decoders. After a call to this function, we should have a clean
- // start-up.
- if (receiver_initialized_) {
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (UnregisterReceiveCodecSafe(i) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "InitializeReceiver() failed, Could not unregister codec");
- return -1;
- }
- }
- }
- if (neteq_.Init() != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "InitializeReceiver() failed, Could not initialize NetEQ");
- return -1;
- }
- neteq_.set_id(id_);
- if (neteq_.AllocatePacketBuffer(ACMCodecDB::NetEQDecoders(),
- ACMCodecDB::kNumCodecs) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "NetEQ cannot allocate_packet Buffer");
- return -1;
- }
-
- // Register RED and CN.
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (IsCodecRED(i) || IsCodecCN(i)) {
- if (RegisterRecCodecMSSafe(ACMCodecDB::database_[i], i, i,
- ACMNetEQ::kMasterJb) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot register master codec.");
- return -1;
- }
- registered_pltypes_[i] = ACMCodecDB::database_[i].pltype;
- }
- }
-
- receiver_initialized_ = true;
- return 0;
-}
-
-// Reset the decoder state.
-int32_t AudioCodingModuleImpl::ResetDecoder() {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- for (int id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
- if ((codecs_[id] != NULL) && (registered_pltypes_[id] != -1)) {
- if (codecs_[id]->ResetDecoder(registered_pltypes_[id]) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "ResetDecoder failed:");
- return -1;
- }
- }
- }
- return neteq_.FlushBuffers();
-}
-
-// Get current receive frequency.
-int32_t AudioCodingModuleImpl::ReceiveFrequency() const {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "ReceiveFrequency()");
- WebRtcACMCodecParams codec_params;
-
- CriticalSectionScoped lock(acm_crit_sect_);
- if (DecoderParamByPlType(last_recv_audio_codec_pltype_, codec_params) < 0) {
- return neteq_.CurrentSampFreqHz();
- } else if (codec_params.codec_inst.plfreq == 48000) {
- // TODO(tlegrand): Remove this option when we have full 48 kHz support.
- return 32000;
- } else {
- return codec_params.codec_inst.plfreq;
- }
-}
-
-// Get current playout frequency.
-int32_t AudioCodingModuleImpl::PlayoutFrequency() const {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "PlayoutFrequency()");
-
- CriticalSectionScoped lock(acm_crit_sect_);
-
- return neteq_.CurrentSampFreqHz();
-}
-
-// Register possible receive codecs, can be called multiple times,
-// for codecs, CNG (NB, WB and SWB), DTMF, RED.
-int32_t AudioCodingModuleImpl::RegisterReceiveCodec(
- const CodecInst& receive_codec) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (receive_codec.channels > 2) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "More than 2 audio channel is not supported.");
- return -1;
- }
-
- int mirror_id;
- int codec_id = ACMCodecDB::ReceiverCodecNumber(&receive_codec, &mirror_id);
-
- if (codec_id < 0 || codec_id >= ACMCodecDB::kNumCodecs) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Wrong codec params to be registered as receive codec");
- return -1;
- }
- // Check if the payload-type is valid.
- if (!ACMCodecDB::ValidPayloadType(receive_codec.pltype)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Invalid payload-type %d for %s.", receive_codec.pltype,
- receive_codec.plname);
- return -1;
- }
-
- if (!receiver_initialized_) {
- if (InitializeReceiverSafe() < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot initialize reciver, so failed registering a codec.");
- return -1;
- }
- }
-
- // If codec already registered, unregister. Except for CN where we only
- // unregister if payload type is changing.
- if ((registered_pltypes_[codec_id] == receive_codec.pltype)
- && IsCodecCN(&receive_codec)) {
- // Codec already registered as receiver with this payload type. Nothing
- // to be done.
- return 0;
- } else if (registered_pltypes_[codec_id] != -1) {
- if (UnregisterReceiveCodecSafe(codec_id) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot register master codec.");
- return -1;
- }
- }
-
- if (RegisterRecCodecMSSafe(receive_codec, codec_id, mirror_id,
- ACMNetEQ::kMasterJb) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot register master codec.");
- return -1;
- }
-
- // TODO(andrew): Refactor how the slave is initialized. Can we instead
- // always start up a slave and pre-register CN and RED? We should be able
- // to get rid of stereo_receive_registered_.
- // http://code.google.com/p/webrtc/issues/detail?id=453
-
- // Register stereo codecs with the slave, or, if we've had already seen a
- // stereo codec, register CN or RED as a special case.
- if (receive_codec.channels == 2 ||
- (stereo_receive_registered_ && (IsCodecCN(&receive_codec) ||
- IsCodecRED(&receive_codec)))) {
- // TODO(andrew): refactor this block to combine with InitStereoSlave().
-
- if (!stereo_receive_registered_) {
- // This is the first time a stereo codec has been registered. Make
- // some stereo preparations.
-
- // Add a stereo slave.
- assert(neteq_.num_slaves() == 0);
- if (neteq_.AddSlave(ACMCodecDB::NetEQDecoders(),
- ACMCodecDB::kNumCodecs) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot add slave jitter buffer to NetEQ.");
- return -1;
- }
-
- // Register any existing CN or RED codecs with the slave and as stereo.
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (registered_pltypes_[i] != -1 && (IsCodecRED(i) || IsCodecCN(i))) {
- stereo_receive_[i] = true;
-
- CodecInst codec;
- memcpy(&codec, &ACMCodecDB::database_[i], sizeof(CodecInst));
- codec.pltype = registered_pltypes_[i];
- if (RegisterRecCodecMSSafe(codec, i, i, ACMNetEQ::kSlaveJb) < 0) {
- WEBRTC_TRACE(kTraceError, kTraceAudioCoding, id_,
- "Cannot register slave codec.");
- return -1;
- }
- }
- }
- }
-
- if (RegisterRecCodecMSSafe(receive_codec, codec_id, mirror_id,
- ACMNetEQ::kSlaveJb) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot register slave codec.");
- return -1;
- }
-
- if (!stereo_receive_[codec_id] &&
- (last_recv_audio_codec_pltype_ == receive_codec.pltype)) {
- // The last received payload type is the same as the one we are
- // registering. Expected number of channels to receive is one (mono),
- // but we are now registering the receiving codec as stereo (number of
- // channels is 2).
- // Set |last_recv_audio_coded_pltype_| to invalid value to trigger a
- // flush in NetEq, and a reset of expected number of channels next time a
- // packet is received in AudioCodingModuleImpl::IncomingPacket().
- last_recv_audio_codec_pltype_ = -1;
- }
-
- stereo_receive_[codec_id] = true;
- stereo_receive_registered_ = true;
- } else {
- if (last_recv_audio_codec_pltype_ == receive_codec.pltype &&
- expected_channels_ == 2) {
- // The last received payload type is the same as the one we are
- // registering. Expected number of channels to receive is two (stereo),
- // but we are now registering the receiving codec as mono (number of
- // channels is 1).
- // Set |last_recv_audio_coded_pl_type_| to invalid value to trigger a
- // flush in NetEq, and a reset of expected number of channels next time a
- // packet is received in AudioCodingModuleImpl::IncomingPacket().
- last_recv_audio_codec_pltype_ = -1;
- }
- stereo_receive_[codec_id] = false;
- }
-
- registered_pltypes_[codec_id] = receive_codec.pltype;
-
- if (IsCodecRED(&receive_codec)) {
- receive_red_pltype_ = receive_codec.pltype;
- }
- return 0;
-}
-
-int32_t AudioCodingModuleImpl::RegisterRecCodecMSSafe(
- const CodecInst& receive_codec, int16_t codec_id,
- int16_t mirror_id, ACMNetEQ::JitterBuffer jitter_buffer) {
- ACMGenericCodec** codecs;
- if (jitter_buffer == ACMNetEQ::kMasterJb) {
- codecs = &codecs_[0];
- } else if (jitter_buffer == ACMNetEQ::kSlaveJb) {
- codecs = &slave_codecs_[0];
- if (codecs_[codec_id]->IsTrueStereoCodec()) {
- // True stereo codecs need to use the same codec memory
- // for both master and slave.
- slave_codecs_[mirror_id] = codecs_[mirror_id];
- mirror_codec_idx_[mirror_id] = mirror_id;
- }
- } else {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "RegisterReceiveCodecMSSafe failed, jitter_buffer is neither "
- "master or slave ");
- return -1;
- }
-
- if (codecs[mirror_id] == NULL) {
- codecs[mirror_id] = CreateCodec(receive_codec);
- if (codecs[mirror_id] == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot create codec to register as receive codec");
- return -1;
- }
- mirror_codec_idx_[mirror_id] = mirror_id;
- }
- if (mirror_id != codec_id) {
- codecs[codec_id] = codecs[mirror_id];
- mirror_codec_idx_[codec_id] = mirror_id;
- }
-
- codecs[codec_id]->SetIsMaster(jitter_buffer == ACMNetEQ::kMasterJb);
-
- int16_t status = 0;
- WebRtcACMCodecParams codec_params;
- memcpy(&(codec_params.codec_inst), &receive_codec, sizeof(CodecInst));
- codec_params.enable_vad = false;
- codec_params.enable_dtx = false;
- codec_params.vad_mode = VADNormal;
- if (!codecs[codec_id]->DecoderInitialized()) {
- // Force initialization.
- status = codecs[codec_id]->InitDecoder(&codec_params, true);
- if (status < 0) {
- // Could not initialize the decoder, we don't want to
- // continue if we could not initialize properly.
- WEBRTC_TRACE(
- webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "could not initialize the receive codec, codec not registered");
-
- return -1;
- }
- } else if (mirror_id != codec_id) {
- // Currently this only happens for iSAC.
- // We have to store the decoder parameters.
- codecs[codec_id]->SaveDecoderParam(&codec_params);
- }
-
- if (codecs[codec_id]->RegisterInNetEq(&neteq_, receive_codec) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Receive codec could not be registered in NetEQ");
- return -1;
- }
- // Guarantee that the same payload-type that is
- // registered in NetEQ is stored in the codec.
- codecs[codec_id]->SaveDecoderParam(&codec_params);
-
- return status;
-}
-
-// Get current received codec.
-int32_t AudioCodingModuleImpl::ReceiveCodec(
- CodecInst* current_codec) const {
- WebRtcACMCodecParams decoder_param;
- CriticalSectionScoped lock(acm_crit_sect_);
-
- for (int id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
- if (codecs_[id] != NULL) {
- if (codecs_[id]->DecoderInitialized()) {
- if (codecs_[id]->DecoderParams(&decoder_param,
- last_recv_audio_codec_pltype_)) {
- memcpy(current_codec, &decoder_param.codec_inst,
- sizeof(CodecInst));
- return 0;
- }
- }
- }
- }
-
- // If we are here then we haven't found any codec. Set codec pltype to -1 to
- // indicate that the structure is invalid and return -1.
- current_codec->pltype = -1;
- return -1;
-}
-
-// Incoming packet from network parsed and ready for decode.
-int32_t AudioCodingModuleImpl::IncomingPacket(
- const uint8_t* incoming_payload,
- const int32_t payload_length,
- const WebRtcRTPHeader& rtp_info) {
- WebRtcRTPHeader rtp_header;
-
- memcpy(&rtp_header, &rtp_info, sizeof(WebRtcRTPHeader));
-
- if (payload_length < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "IncomingPacket() Error, payload-length cannot be negative");
- return -1;
- }
-
- {
- // Store the payload Type. This will be used to retrieve "received codec"
- // and "received frequency."
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // Check there are packets missed between the last injected packet, and the
- // latest received packet. If so and we are in AV-sync mode then we would
- // like to fill the gap. Shouldn't be the first payload.
- if (av_sync_ && first_payload_received_ &&
- rtp_info.header.sequenceNumber > last_sequence_number_ + 1) {
- // If the last packet pushed was sync-packet account for all missing
- // packets. Otherwise leave some room for PLC.
- if (last_packet_was_sync_) {
- while (rtp_info.header.sequenceNumber > last_sequence_number_ + 2) {
- PushSyncPacketSafe();
- }
- } else {
- // Leave two packet room for NetEq perform PLC.
- if (rtp_info.header.sequenceNumber > last_sequence_number_ + 3) {
- last_sequence_number_ += 2;
- last_incoming_send_timestamp_ += last_timestamp_diff_ * 2;
- last_receive_timestamp_ += 2 * last_timestamp_diff_;
- while (rtp_info.header.sequenceNumber > last_sequence_number_ + 1)
- PushSyncPacketSafe();
- }
- }
- }
-
- uint8_t my_payload_type;
-
- // Check if this is an RED payload.
- if (rtp_info.header.payloadType == receive_red_pltype_) {
- // Get the primary payload-type.
- my_payload_type = incoming_payload[0] & 0x7F;
- } else {
- my_payload_type = rtp_info.header.payloadType;
- }
-
- // If payload is audio, check if received payload is different from
- // previous.
- if (!rtp_info.type.Audio.isCNG) {
- // This is Audio not CNG.
-
- if (my_payload_type != last_recv_audio_codec_pltype_) {
- // We detect a change in payload type. It is necessary for iSAC
- // we are going to use ONE iSAC instance for decoding both WB and
- // SWB payloads. If payload is changed there might be a need to reset
- // sampling rate of decoder. depending what we have received "now".
- for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
- if (registered_pltypes_[i] == my_payload_type) {
- if (UpdateUponReceivingCodec(i) != 0)
- return -1;
- break;
- }
- }
- // Codec is changed, there might be a jump in timestamp, therefore,
- // we have to reset some variables that track NetEq buffer.
- if (track_neteq_buffer_ || av_sync_) {
- last_incoming_send_timestamp_ = rtp_info.header.timestamp;
- }
-
- if (nack_enabled_) {
- assert(nack_.get());
- // Codec is changed, reset NACK and update sampling rate.
- nack_->Reset();
- nack_->UpdateSampleRate(
- ACMCodecDB::database_[current_receive_codec_idx_].plfreq);
- }
- }
- last_recv_audio_codec_pltype_ = my_payload_type;
- }
-
- // Current timestamp based on the receiver sampling frequency.
- last_receive_timestamp_ = NowTimestamp(current_receive_codec_idx_);
-
- if (nack_enabled_) {
- assert(nack_.get());
- nack_->UpdateLastReceivedPacket(rtp_header.header.sequenceNumber,
- rtp_header.header.timestamp);
- }
- }
-
- int per_neteq_payload_length = payload_length;
- // Split the payload for stereo packets, so that first half of payload
- // vector holds left channel, and second half holds right channel.
- if (expected_channels_ == 2) {
- if (!rtp_info.type.Audio.isCNG) {
- // Create a new vector for the payload, maximum payload size.
- int32_t length = payload_length;
- uint8_t payload[kMaxPacketSize];
- assert(payload_length <= kMaxPacketSize);
- memcpy(payload, incoming_payload, payload_length);
- codecs_[current_receive_codec_idx_]->SplitStereoPacket(payload, &length);
- rtp_header.type.Audio.channel = 2;
- per_neteq_payload_length = length / 2;
- // Insert packet into NetEQ.
- if (neteq_.RecIn(payload, length, rtp_header,
- last_receive_timestamp_) < 0)
- return -1;
- } else {
- // If we receive a CNG packet while expecting stereo, we ignore the
- // packet and continue. CNG is not supported for stereo.
- return 0;
- }
- } else {
- if (neteq_.RecIn(incoming_payload, payload_length, rtp_header,
- last_receive_timestamp_) < 0)
- return -1;
- }
-
- {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // Update buffering uses |last_incoming_send_timestamp_| so it should be
- // before the next block.
- if (track_neteq_buffer_)
- UpdateBufferingSafe(rtp_header, per_neteq_payload_length);
-
- if (av_sync_) {
- if (rtp_info.header.sequenceNumber == last_sequence_number_ + 1) {
- last_timestamp_diff_ = rtp_info.header.timestamp -
- last_incoming_send_timestamp_;
- }
- last_sequence_number_ = rtp_info.header.sequenceNumber;
- last_ssrc_ = rtp_info.header.ssrc;
- last_packet_was_sync_ = false;
- }
-
- if (av_sync_ || track_neteq_buffer_) {
- last_incoming_send_timestamp_ = rtp_info.header.timestamp;
- }
-
- // Set the following regardless of tracking NetEq buffer or being in
- // AV-sync mode. Only if the received packet is not CNG.
- if (!rtp_info.type.Audio.isCNG)
- first_payload_received_ = true;
- }
- return 0;
-}
-
-int AudioCodingModuleImpl::UpdateUponReceivingCodec(int index) {
- if (codecs_[index] == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceAudioCoding, id_,
- "IncomingPacket() error: payload type found but "
- "corresponding codec is NULL");
- return -1;
- }
- codecs_[index]->UpdateDecoderSampFreq(index);
- neteq_.set_received_stereo(stereo_receive_[index]);
- current_receive_codec_idx_ = index;
-
- // If we have a change in the expected number of channels, flush packet
- // buffers in NetEQ.
- if ((stereo_receive_[index] && (expected_channels_ == 1)) ||
- (!stereo_receive_[index] && (expected_channels_ == 2))) {
- neteq_.FlushBuffers();
- codecs_[index]->ResetDecoder(registered_pltypes_[index]);
- }
-
- if (stereo_receive_[index] && (expected_channels_ == 1)) {
- // When switching from a mono to stereo codec reset the slave.
- if (InitStereoSlave() != 0)
- return -1;
- }
-
- // Store number of channels we expect to receive for the current payload type.
- if (stereo_receive_[index]) {
- expected_channels_ = 2;
- } else {
- expected_channels_ = 1;
- }
-
- // Reset previous received channel.
- prev_received_channel_ = 0;
- return 0;
-}
-
-bool AudioCodingModuleImpl::IsCodecForSlave(int index) const {
- return (registered_pltypes_[index] != -1 && stereo_receive_[index]);
-}
-
-int AudioCodingModuleImpl::InitStereoSlave() {
- neteq_.RemoveSlaves();
-
- if (neteq_.AddSlave(ACMCodecDB::NetEQDecoders(),
- ACMCodecDB::kNumCodecs) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot add slave jitter buffer to NetEQ.");
- return -1;
- }
-
- // Register all needed codecs with slave.
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (codecs_[i] != NULL && IsCodecForSlave(i)) {
- WebRtcACMCodecParams decoder_params;
- if (codecs_[i]->DecoderParams(&decoder_params, registered_pltypes_[i])) {
- if (RegisterRecCodecMSSafe(decoder_params.codec_inst,
- i, ACMCodecDB::MirrorID(i),
- ACMNetEQ::kSlaveJb) < 0) {
- WEBRTC_TRACE(kTraceError, kTraceAudioCoding, id_,
- "Cannot register slave codec.");
- return -1;
- }
- }
- }
- }
- return 0;
-}
-
-int AudioCodingModuleImpl::SetMinimumPlayoutDelay(int time_ms) {
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- // Don't let the extra delay modified while accumulating buffers in NetEq.
- if (track_neteq_buffer_ && first_payload_received_)
- return 0;
- }
- return neteq_.SetMinimumDelay(time_ms);
-}
-
-int AudioCodingModuleImpl::SetMaximumPlayoutDelay(int time_ms) {
- return neteq_.SetMaximumDelay(time_ms);
-}
-
-// Get Dtmf playout status.
-bool AudioCodingModuleImpl::DtmfPlayoutStatus() const {
-#ifndef WEBRTC_CODEC_AVT
- return false;
-#else
- return neteq_.avt_playout();
-#endif
-}
-
-// Configure Dtmf playout status i.e on/off playout the incoming outband
-// Dtmf tone.
-int32_t AudioCodingModuleImpl::SetDtmfPlayoutStatus(
-#ifndef WEBRTC_CODEC_AVT
- const bool /* enable */) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- "SetDtmfPlayoutStatus() failed: AVT is not supported.");
- return -1;
-#else
- const bool enable) {
- return neteq_.SetAVTPlayout(enable);
-#endif
-}
-
-// Estimate the Bandwidth based on the incoming stream, needed for one way
-// audio where the RTCP send the BW estimate.
-// This is also done in the RTP module.
-int32_t AudioCodingModuleImpl::DecoderEstimatedBandwidth() const {
- CodecInst codec;
- int16_t codec_id = -1;
- int pltype_wb;
- int pltype_swb;
-
- // Get iSAC settings.
- for (int id = 0; id < ACMCodecDB::kNumCodecs; id++) {
- // Store codec settings for codec number "codeCntr" in the output struct.
- ACMCodecDB::Codec(id, &codec);
-
- if (!STR_CASE_CMP(codec.plname, "isac")) {
- codec_id = 1;
- pltype_wb = codec.pltype;
-
- ACMCodecDB::Codec(id + 1, &codec);
- pltype_swb = codec.pltype;
-
- break;
- }
- }
-
- if (codec_id < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "DecoderEstimatedBandwidth failed");
- return -1;
- }
-
- if ((last_recv_audio_codec_pltype_ == pltype_wb) ||
- (last_recv_audio_codec_pltype_ == pltype_swb)) {
- return codecs_[codec_id]->GetEstimatedBandwidth();
- } else {
- return -1;
- }
-}
-
-// Set playout mode for: voice, fax, or streaming.
-int32_t AudioCodingModuleImpl::SetPlayoutMode(
- const AudioPlayoutMode mode) {
- if ((mode != voice) && (mode != fax) && (mode != streaming) &&
- (mode != off)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Invalid playout mode.");
- return -1;
- }
- return neteq_.SetPlayoutMode(mode);
-}
-
-// Get playout mode voice, fax.
-AudioPlayoutMode AudioCodingModuleImpl::PlayoutMode() const {
- return neteq_.playout_mode();
-}
-
-// Get 10 milliseconds of raw audio data to play out.
-// Automatic resample to the requested frequency.
-int32_t AudioCodingModuleImpl::PlayoutData10Ms(
- int32_t desired_freq_hz, AudioFrame* audio_frame) {
- TRACE_EVENT_ASYNC_BEGIN0("webrtc", "ACM::PlayoutData10Ms", this);
- bool stereo_mode;
-
- if (GetSilence(desired_freq_hz, audio_frame)) {
- TRACE_EVENT_ASYNC_END1("webrtc", "ACM::PlayoutData10Ms", this,
- "silence", true);
- return 0; // Silence is generated, return.
- }
-
- // RecOut always returns 10 ms.
- if (neteq_.RecOut(audio_frame_) != 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "PlayoutData failed, RecOut Failed");
- return -1;
- }
- int decoded_seq_num;
- uint32_t decoded_timestamp;
- bool update_nack =
- neteq_.DecodedRtpInfo(&decoded_seq_num, &decoded_timestamp) &&
- nack_enabled_; // Update NACK only if it is enabled.
- audio_frame->num_channels_ = audio_frame_.num_channels_;
- audio_frame->vad_activity_ = audio_frame_.vad_activity_;
- audio_frame->speech_type_ = audio_frame_.speech_type_;
-
- stereo_mode = (audio_frame_.num_channels_ > 1);
-
- // For stereo playout:
- // Master and Slave samples are interleaved starting with Master.
- const uint16_t receive_freq =
- static_cast<uint16_t>(audio_frame_.sample_rate_hz_);
- bool tone_detected = false;
- int16_t last_detected_tone;
- int16_t tone;
-
- // Limit the scope of ACM Critical section.
- {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // Update call statistics.
- call_stats_.DecodedByNetEq(audio_frame->speech_type_);
-
- if (update_nack) {
- assert(nack_.get());
- nack_->UpdateLastDecodedPacket(decoded_seq_num, decoded_timestamp);
- }
-
- // If we are in AV-sync and have already received an audio packet, but the
- // latest packet is too late, then insert sync packet.
- if (av_sync_ && first_payload_received_ &&
- NowTimestamp(current_receive_codec_idx_) > 5 * last_timestamp_diff_ +
- last_receive_timestamp_) {
- if (!last_packet_was_sync_) {
- // If the last packet inserted has been a regular packet Skip two
- // packets to give room for PLC.
- last_incoming_send_timestamp_ += 2 * last_timestamp_diff_;
- last_sequence_number_ += 2;
- last_receive_timestamp_ += 2 * last_timestamp_diff_;
- }
-
- // One sync packet.
- if (PushSyncPacketSafe() < 0)
- return -1;
- }
-
- if ((receive_freq != desired_freq_hz) && (desired_freq_hz != -1)) {
- TRACE_EVENT_ASYNC_END2("webrtc", "ACM::PlayoutData10Ms", this,
- "seqnum", decoded_seq_num,
- "now", clock_->TimeInMilliseconds());
- // Resample payload_data.
- int16_t temp_len = output_resampler_.Resample10Msec(
- audio_frame_.data_, receive_freq, audio_frame->data_,
- desired_freq_hz, audio_frame_.num_channels_);
-
- if (temp_len < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "PlayoutData failed, resampler failed");
- return -1;
- }
-
- // Set the payload data length from the resampler.
- audio_frame->samples_per_channel_ = static_cast<uint16_t>(temp_len);
- // Set the sampling frequency.
- audio_frame->sample_rate_hz_ = desired_freq_hz;
- } else {
- TRACE_EVENT_ASYNC_END2("webrtc", "ACM::PlayoutData10Ms", this,
- "seqnum", decoded_seq_num,
- "now", clock_->TimeInMilliseconds());
- memcpy(audio_frame->data_, audio_frame_.data_,
- audio_frame_.samples_per_channel_ * audio_frame->num_channels_
- * sizeof(int16_t));
- // Set the payload length.
- audio_frame->samples_per_channel_ =
- audio_frame_.samples_per_channel_;
- // Set the sampling frequency.
- audio_frame->sample_rate_hz_ = receive_freq;
- }
-
- // Tone detection done for master channel.
- if (dtmf_detector_ != NULL) {
- // Dtmf Detection.
- if (audio_frame->sample_rate_hz_ == 8000) {
- // Use audio_frame->data_ then Dtmf detector doesn't
- // need resampling.
- if (!stereo_mode) {
- dtmf_detector_->Detect(audio_frame->data_,
- audio_frame->samples_per_channel_,
- audio_frame->sample_rate_hz_, tone_detected,
- tone);
- } else {
- // We are in 8 kHz so the master channel needs only 80 samples.
- int16_t master_channel[80];
- for (int n = 0; n < 80; n++) {
- master_channel[n] = audio_frame->data_[n << 1];
- }
- dtmf_detector_->Detect(master_channel,
- audio_frame->samples_per_channel_,
- audio_frame->sample_rate_hz_, tone_detected,
- tone);
- }
- } else {
- // Do the detection on the audio that we got from NetEQ (audio_frame_).
- if (!stereo_mode) {
- dtmf_detector_->Detect(audio_frame_.data_,
- audio_frame_.samples_per_channel_,
- receive_freq, tone_detected, tone);
- } else {
- int16_t master_channel[WEBRTC_10MS_PCM_AUDIO];
- for (int n = 0; n < audio_frame_.samples_per_channel_; n++) {
- master_channel[n] = audio_frame_.data_[n << 1];
- }
- dtmf_detector_->Detect(master_channel,
- audio_frame_.samples_per_channel_,
- receive_freq, tone_detected, tone);
- }
- }
- }
-
- // We want to do this while we are in acm_crit_sect_.
- // (Doesn't really need to initialize the following
- // variable but Linux complains if we don't.)
- last_detected_tone = kACMToneEnd;
- if (tone_detected) {
- last_detected_tone = last_detected_tone_;
- last_detected_tone_ = tone;
- }
- }
-
- if (tone_detected) {
- // We will deal with callback here, so enter callback critical section.
- CriticalSectionScoped lock(callback_crit_sect_);
-
- if (dtmf_callback_ != NULL) {
- if (tone != kACMToneEnd) {
- // just a tone
- dtmf_callback_->IncomingDtmf(static_cast<uint8_t>(tone), false);
- } else if ((tone == kACMToneEnd) && (last_detected_tone != kACMToneEnd)) {
- // The tone is "END" and the previously detected tone is
- // not "END," so call fir an end.
- dtmf_callback_->IncomingDtmf(static_cast<uint8_t>(last_detected_tone),
- true);
- }
- }
- }
-
- audio_frame->id_ = id_;
- audio_frame->energy_ = -1;
- audio_frame->timestamp_ = 0;
-
- return 0;
-}
-
-/////////////////////////////////////////
-// Statistics
-//
-
-int32_t AudioCodingModuleImpl::NetworkStatistics(
- ACMNetworkStatistics* statistics) {
- int32_t status;
- status = neteq_.NetworkStatistics(statistics);
- return status;
-}
-
-void AudioCodingModuleImpl::DestructEncoderInst(void* inst) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "DestructEncoderInst()");
- if (!HaveValidEncoder("DestructEncoderInst")) {
- return;
- }
-
- codecs_[current_send_codec_idx_]->DestructEncoderInst(inst);
-}
-
-int16_t AudioCodingModuleImpl::AudioBuffer(
- WebRtcACMAudioBuff& buffer) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "AudioBuffer()");
- if (!HaveValidEncoder("AudioBuffer")) {
- return -1;
- }
- buffer.last_in_timestamp = last_in_timestamp_;
- return codecs_[current_send_codec_idx_]->AudioBuffer(buffer);
-}
-
-int16_t AudioCodingModuleImpl::SetAudioBuffer(
- WebRtcACMAudioBuff& buffer) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "SetAudioBuffer()");
- if (!HaveValidEncoder("SetAudioBuffer")) {
- return -1;
- }
- return codecs_[current_send_codec_idx_]->SetAudioBuffer(buffer);
-}
-
-uint32_t AudioCodingModuleImpl::EarliestTimestamp() const {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "EarliestTimestamp()");
- if (!HaveValidEncoder("EarliestTimestamp")) {
- return -1;
- }
- return codecs_[current_send_codec_idx_]->EarliestTimestamp();
-}
-
-int32_t AudioCodingModuleImpl::RegisterVADCallback(
- ACMVADCallback* vad_callback) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
- "RegisterVADCallback()");
- CriticalSectionScoped lock(callback_crit_sect_);
- vad_callback_ = vad_callback;
- return 0;
-}
-
-// TODO(turajs): Remove this API if it is not used.
-// TODO(tlegrand): Modify this function to work for stereo, and add tests.
-// TODO(turajs): Receive timestamp in this method is incremented by frame-size
-// and does not reflect the true receive frame-size. Therefore, subsequent
-// jitter computations are not accurate.
-int32_t AudioCodingModuleImpl::IncomingPayload(
- const uint8_t* incoming_payload, const int32_t payload_length,
- const uint8_t payload_type, const uint32_t timestamp) {
- if (payload_length < 0) {
- // Log error in trace file.
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "IncomingPacket() Error, payload-length cannot be negative");
- return -1;
- }
-
- if (dummy_rtp_header_ == NULL) {
- // This is the first time that we are using |dummy_rtp_header_|
- // so we have to create it.
- WebRtcACMCodecParams codec_params;
- dummy_rtp_header_ = new WebRtcRTPHeader;
- if (dummy_rtp_header_ == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "IncomingPayload() Error, out of memory");
- return -1;
- }
- dummy_rtp_header_->header.payloadType = payload_type;
- // Don't matter in this case.
- dummy_rtp_header_->header.ssrc = 0;
- dummy_rtp_header_->header.markerBit = false;
- // Start with random numbers.
- dummy_rtp_header_->header.sequenceNumber = rand();
- dummy_rtp_header_->header.timestamp =
- (static_cast<uint32_t>(rand()) << 16) +
- static_cast<uint32_t>(rand());
- dummy_rtp_header_->type.Audio.channel = 1;
-
- if (DecoderParamByPlType(payload_type, codec_params) < 0) {
- // We didn't find a codec with the given payload.
- // Something is wrong we exit, but we delete |dummy_rtp_header_|
- // and set it to NULL to start clean next time.
- delete dummy_rtp_header_;
- dummy_rtp_header_ = NULL;
- return -1;
- }
- recv_pl_frame_size_smpls_ = codec_params.codec_inst.pacsize;
- }
-
- if (payload_type != dummy_rtp_header_->header.payloadType) {
- // Payload type has changed since the last time we might need to
- // update the frame-size.
- WebRtcACMCodecParams codec_params;
- if (DecoderParamByPlType(payload_type, codec_params) < 0) {
- // We didn't find a codec with the given payload.
- return -1;
- }
- recv_pl_frame_size_smpls_ = codec_params.codec_inst.pacsize;
- dummy_rtp_header_->header.payloadType = payload_type;
- }
-
- if (timestamp > 0) {
- dummy_rtp_header_->header.timestamp = timestamp;
- }
-
- // Store the payload Type. this will be used to retrieve "received codec"
- // and "received frequency."
- last_recv_audio_codec_pltype_ = payload_type;
-
- last_receive_timestamp_ += recv_pl_frame_size_smpls_;
- // Insert in NetEQ.
- if (neteq_.RecIn(incoming_payload, payload_length, *dummy_rtp_header_,
- last_receive_timestamp_) < 0) {
- return -1;
- }
-
- // Get ready for the next payload.
- dummy_rtp_header_->header.sequenceNumber++;
- dummy_rtp_header_->header.timestamp += recv_pl_frame_size_smpls_;
- return 0;
-}
-
-int16_t AudioCodingModuleImpl::DecoderParamByPlType(
- const uint8_t payload_type,
- WebRtcACMCodecParams& codec_params) const {
- CriticalSectionScoped lock(acm_crit_sect_);
- for (int16_t id = 0; id < ACMCodecDB::kMaxNumCodecs;
- id++) {
- if (codecs_[id] != NULL) {
- if (codecs_[id]->DecoderInitialized()) {
- if (codecs_[id]->DecoderParams(&codec_params, payload_type)) {
- return 0;
- }
- }
- }
- }
- // If we are here it means that we could not find a
- // codec with that payload type. reset the values to
- // not acceptable values and return -1.
- codec_params.codec_inst.plname[0] = '\0';
- codec_params.codec_inst.pacsize = 0;
- codec_params.codec_inst.rate = 0;
- codec_params.codec_inst.pltype = -1;
- return -1;
-}
-
-int16_t AudioCodingModuleImpl::DecoderListIDByPlName(
- const char* name, const uint16_t frequency) const {
- WebRtcACMCodecParams codec_params;
- CriticalSectionScoped lock(acm_crit_sect_);
- for (int16_t id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
- if ((codecs_[id] != NULL)) {
- if (codecs_[id]->DecoderInitialized()) {
- assert(registered_pltypes_[id] >= 0);
- assert(registered_pltypes_[id] <= 255);
- codecs_[id]->DecoderParams(
- &codec_params, static_cast<uint8_t>(registered_pltypes_[id]));
- if (!STR_CASE_CMP(codec_params.codec_inst.plname, name)) {
- // Check if the given sampling frequency matches.
- // A zero sampling frequency means we matching the names
- // is sufficient and we don't need to check for the
- // frequencies.
- // Currently it is only iSAC which has one name but two
- // sampling frequencies.
- if ((frequency == 0)||
- (codec_params.codec_inst.plfreq == frequency)) {
- return id;
- }
- }
- }
- }
- }
- // If we are here it means that we could not find a
- // codec with that payload type. return -1.
- return -1;
-}
-
-int32_t AudioCodingModuleImpl::LastEncodedTimestamp(
- uint32_t& timestamp) const {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (!HaveValidEncoder("LastEncodedTimestamp")) {
- return -1;
- }
- timestamp = codecs_[current_send_codec_idx_]->LastEncodedTimestamp();
- return 0;
-}
-
-int32_t AudioCodingModuleImpl::ReplaceInternalDTXWithWebRtc(
- bool use_webrtc_dtx) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("ReplaceInternalDTXWithWebRtc")) {
- WEBRTC_TRACE(
- webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Cannot replace codec internal DTX when no send codec is registered.");
- return -1;
- }
-
- int32_t res = codecs_[current_send_codec_idx_]->ReplaceInternalDTX(
- use_webrtc_dtx);
- // Check if VAD is turned on, or if there is any error.
- if (res == 1) {
- vad_enabled_ = true;
- } else if (res < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Failed to set ReplaceInternalDTXWithWebRtc(%d)",
- use_webrtc_dtx);
- return res;
- }
-
- return 0;
-}
-
-int32_t AudioCodingModuleImpl::IsInternalDTXReplacedWithWebRtc(
- bool* uses_webrtc_dtx) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("IsInternalDTXReplacedWithWebRtc")) {
- return -1;
- }
- if (codecs_[current_send_codec_idx_]->IsInternalDTXReplaced(uses_webrtc_dtx)
- < 0) {
- return -1;
- }
- return 0;
-}
-
-int AudioCodingModuleImpl::SetISACMaxRate(int max_bit_per_sec) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("SetISACMaxRate")) {
- return -1;
- }
-
- return codecs_[current_send_codec_idx_]->SetISACMaxRate(max_bit_per_sec);
-}
-
-int AudioCodingModuleImpl::SetISACMaxPayloadSize(int max_size_bytes) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("SetISACMaxPayloadSize")) {
- return -1;
- }
-
- return codecs_[current_send_codec_idx_]->SetISACMaxPayloadSize(
- max_size_bytes);
-}
-
-int32_t AudioCodingModuleImpl::ConfigISACBandwidthEstimator(
- int frame_size_ms,
- int rate_bit_per_sec,
- bool enforce_frame_size) {
- CriticalSectionScoped lock(acm_crit_sect_);
-
- if (!HaveValidEncoder("ConfigISACBandwidthEstimator")) {
- return -1;
- }
-
- return codecs_[current_send_codec_idx_]->ConfigISACBandwidthEstimator(
- frame_size_ms, rate_bit_per_sec, enforce_frame_size);
-}
-
-int32_t AudioCodingModuleImpl::PlayoutTimestamp(
- uint32_t* timestamp) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
- "PlayoutTimestamp()");
- {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (track_neteq_buffer_) {
- *timestamp = playout_ts_;
- return 0;
- }
- }
- return neteq_.PlayoutTimestamp(*timestamp);
-}
-
-bool AudioCodingModuleImpl::HaveValidEncoder(const char* caller_name) const {
- if ((!send_codec_registered_) || (current_send_codec_idx_ < 0) ||
- (current_send_codec_idx_ >= ACMCodecDB::kNumCodecs)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "%s failed: No send codec is registered.", caller_name);
- return false;
- }
- if ((current_send_codec_idx_ < 0) ||
- (current_send_codec_idx_ >= ACMCodecDB::kNumCodecs)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "%s failed: Send codec index out of range.", caller_name);
- return false;
- }
- if (codecs_[current_send_codec_idx_] == NULL) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "%s failed: Send codec is NULL pointer.", caller_name);
- return false;
- }
- return true;
-}
-
-int AudioCodingModuleImpl::UnregisterReceiveCodec(uint8_t payload_type) {
- CriticalSectionScoped lock(acm_crit_sect_);
- int id;
-
- // Search through the list of registered payload types.
- for (id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
- if (registered_pltypes_[id] == payload_type) {
- // We have found the id registered with the payload type.
- break;
- }
- }
-
- if (id >= ACMCodecDB::kNumCodecs) {
- // Payload type was not registered. No need to unregister.
- return 0;
- }
-
- // Unregister the codec with the given payload type.
- return UnregisterReceiveCodecSafe(id);
-}
-
-int32_t AudioCodingModuleImpl::UnregisterReceiveCodecSafe(
- const int16_t codec_id) {
- const WebRtcNetEQDecoder *neteq_decoder = ACMCodecDB::NetEQDecoders();
- int16_t mirror_id = ACMCodecDB::MirrorID(codec_id);
- bool stereo_receiver = false;
-
- if (codecs_[codec_id] != NULL) {
- if (registered_pltypes_[codec_id] != -1) {
- // Store stereo information for future use.
- stereo_receiver = stereo_receive_[codec_id];
-
- // Before deleting the decoder instance unregister from NetEQ.
- if (neteq_.RemoveCodec(neteq_decoder[codec_id],
- stereo_receive_[codec_id]) < 0) {
- CodecInst codec;
- ACMCodecDB::Codec(codec_id, &codec);
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
- "Unregistering %s-%d from NetEQ failed.", codec.plname,
- codec.plfreq);
- return -1;
- }
-
- // CN is a special case for NetEQ, all three sampling frequencies
- // are unregistered if one is deleted.
- if (IsCodecCN(codec_id)) {
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (IsCodecCN(i)) {
- stereo_receive_[i] = false;
- registered_pltypes_[i] = -1;
- }
- }
- } else {
- if (codec_id == mirror_id) {
- codecs_[codec_id]->DestructDecoder();
- if (stereo_receive_[codec_id]) {
- slave_codecs_[codec_id]->DestructDecoder();
- stereo_receive_[codec_id] = false;
- }
- }
- }
-
- // Check if this is the last registered stereo receive codec.
- if (stereo_receiver) {
- bool no_stereo = true;
-
- for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
- if (stereo_receive_[i]) {
- // We still have stereo codecs registered.
- no_stereo = false;
- break;
- }
- }
-
- // If we don't have any stereo codecs left, change status.
- if (no_stereo) {
- neteq_.RemoveSlaves(); // No longer need the slave.
- stereo_receive_registered_ = false;
- }
- }
- }
- }
-
- if (registered_pltypes_[codec_id] == receive_red_pltype_) {
- // RED is going to be unregistered, set to an invalid value.
- receive_red_pltype_ = 255;
- }
- registered_pltypes_[codec_id] = -1;
-
- return 0;
-}
-
-int32_t AudioCodingModuleImpl::REDPayloadISAC(
- const int32_t isac_rate, const int16_t isac_bw_estimate,
- uint8_t* payload, int16_t* length_bytes) {
- if (!HaveValidEncoder("EncodeData")) {
- return -1;
- }
- int16_t status;
- status = codecs_[current_send_codec_idx_]->REDPayloadISAC(isac_rate,
- isac_bw_estimate,
- payload,
- length_bytes);
- return status;
-}
-
-void AudioCodingModuleImpl::ResetFragmentation(int vector_size) {
- for (int n = 0; n < kMaxNumFragmentationVectors; n++) {
- fragmentation_.fragmentationOffset[n] = n * MAX_PAYLOAD_SIZE_BYTE;
- }
- memset(fragmentation_.fragmentationLength, 0, kMaxNumFragmentationVectors *
- sizeof(fragmentation_.fragmentationLength[0]));
- memset(fragmentation_.fragmentationTimeDiff, 0, kMaxNumFragmentationVectors *
- sizeof(fragmentation_.fragmentationTimeDiff[0]));
- memset(fragmentation_.fragmentationPlType, 0, kMaxNumFragmentationVectors *
- sizeof(fragmentation_.fragmentationPlType[0]));
- fragmentation_.fragmentationVectorSize =
- static_cast<uint16_t>(vector_size);
-}
-
-// TODO(turajs): Add second parameter to enable/disable AV-sync.
-int AudioCodingModuleImpl::SetInitialPlayoutDelay(int delay_ms) {
- if (delay_ms < 0 || delay_ms > 10000) {
- return -1;
- }
-
- CriticalSectionScoped lock(acm_crit_sect_);
-
- // Receiver should be initialized before this call processed.
- if (!receiver_initialized_) {
- InitializeReceiverSafe();
- }
-
- if (first_payload_received_) {
- // Too late for this API. Only works before a call is started.
- return -1;
- }
- initial_delay_ms_ = delay_ms;
-
- // If initial delay is zero, NetEq buffer should not be tracked, also we
- // don't want to be in AV-sync mode.
- track_neteq_buffer_ = delay_ms > 0;
- av_sync_ = delay_ms > 0;
-
- neteq_.EnableAVSync(av_sync_);
- return neteq_.SetMinimumDelay(delay_ms);
-}
-
-bool AudioCodingModuleImpl::GetSilence(int desired_sample_rate_hz,
- AudioFrame* frame) {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (initial_delay_ms_ == 0 || !track_neteq_buffer_) {
- return false;
- }
-
- if (accumulated_audio_ms_ >= initial_delay_ms_) {
- // We have enough data stored that match our initial delay target.
- track_neteq_buffer_ = false;
- return false;
- }
-
- // Record call to silence generator.
- call_stats_.DecodedBySilenceGenerator();
-
- // We stop accumulating packets, if the number of packets or the total size
- // exceeds a threshold.
- int max_num_packets;
- int buffer_size_bytes;
- int per_payload_overhead_bytes;
- neteq_.BufferSpec(max_num_packets, buffer_size_bytes,
- per_payload_overhead_bytes);
- int total_bytes_accumulated = num_bytes_accumulated_ +
- num_packets_accumulated_ * per_payload_overhead_bytes;
- if (num_packets_accumulated_ > max_num_packets * 0.9 ||
- total_bytes_accumulated > buffer_size_bytes * 0.9) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- "GetSilence: Initial delay couldn't be achieved."
- " num_packets_accumulated=%d, total_bytes_accumulated=%d",
- num_packets_accumulated_, num_bytes_accumulated_);
- track_neteq_buffer_ = false;
- return false;
- }
-
- if (desired_sample_rate_hz > 0) {
- frame->sample_rate_hz_ = desired_sample_rate_hz;
- } else {
- frame->sample_rate_hz_ = 0;
- if (current_receive_codec_idx_ >= 0) {
- frame->sample_rate_hz_ =
- ACMCodecDB::database_[current_receive_codec_idx_].plfreq;
- } else {
- // No payload received yet, use the default sampling rate of NetEq.
- frame->sample_rate_hz_ = neteq_.CurrentSampFreqHz();
- }
- }
- frame->num_channels_ = expected_channels_;
- frame->samples_per_channel_ = frame->sample_rate_hz_ / 100; // Always 10 ms.
- frame->speech_type_ = AudioFrame::kCNG;
- frame->vad_activity_ = AudioFrame::kVadPassive;
- frame->energy_ = 0;
- int samples = frame->samples_per_channel_ * frame->num_channels_;
- memset(frame->data_, 0, samples * sizeof(int16_t));
- return true;
-}
-
-// Must be called within the scope of ACM critical section.
-int AudioCodingModuleImpl::PushSyncPacketSafe() {
- assert(av_sync_);
- last_sequence_number_++;
- last_incoming_send_timestamp_ += last_timestamp_diff_;
- last_receive_timestamp_ += last_timestamp_diff_;
-
- WebRtcRTPHeader rtp_info;
- rtp_info.header.payloadType = last_recv_audio_codec_pltype_;
- rtp_info.header.ssrc = last_ssrc_;
- rtp_info.header.markerBit = false;
- rtp_info.header.sequenceNumber = last_sequence_number_;
- rtp_info.header.timestamp = last_incoming_send_timestamp_;
- rtp_info.type.Audio.channel = stereo_receive_[current_receive_codec_idx_] ?
- 2 : 1;
- last_packet_was_sync_ = true;
- int payload_len_bytes = neteq_.RecIn(rtp_info, last_receive_timestamp_);
-
- if (payload_len_bytes < 0)
- return -1;
-
- // This is to account for sync packets inserted during the buffering phase.
- if (track_neteq_buffer_)
- UpdateBufferingSafe(rtp_info, payload_len_bytes);
-
- return 0;
-}
-
-// Must be called within the scope of ACM critical section.
-void AudioCodingModuleImpl::UpdateBufferingSafe(const WebRtcRTPHeader& rtp_info,
- int payload_len_bytes) {
- const int in_sample_rate_khz =
- (ACMCodecDB::database_[current_receive_codec_idx_].plfreq / 1000);
- if (first_payload_received_ &&
- rtp_info.header.timestamp > last_incoming_send_timestamp_ &&
- in_sample_rate_khz > 0) {
- accumulated_audio_ms_ += (rtp_info.header.timestamp -
- last_incoming_send_timestamp_) / in_sample_rate_khz;
- }
-
- num_packets_accumulated_++;
- num_bytes_accumulated_ += payload_len_bytes;
-
- playout_ts_ = static_cast<uint32_t>(
- rtp_info.header.timestamp - static_cast<uint32_t>(
- initial_delay_ms_ * in_sample_rate_khz));
-}
-
-uint32_t AudioCodingModuleImpl::NowTimestamp(int codec_id) {
- // Down-cast the time to (32-6)-bit since we only care about
- // the least significant bits. (32-6) bits cover 2^(32-6) = 67108864 ms.
- // we masked 6 most significant bits of 32-bit so we don't lose resolution
- // when do the following multiplication.
- int sample_rate_khz = ACMCodecDB::database_[codec_id].plfreq / 1000;
- const uint32_t now_in_ms = static_cast<uint32_t>(
- clock_->TimeInMilliseconds() & kMaskTimestamp);
- return static_cast<uint32_t>(sample_rate_khz * now_in_ms);
-}
-
-std::vector<uint16_t> AudioCodingModuleImpl::GetNackList(
- int round_trip_time_ms) const {
- CriticalSectionScoped lock(acm_crit_sect_);
- if (round_trip_time_ms < 0) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
- "GetNackList: round trip time cannot be negative."
- " round_trip_time_ms=%d", round_trip_time_ms);
- }
- if (nack_enabled_ && round_trip_time_ms >= 0) {
- assert(nack_.get());
- return nack_->GetNackList(round_trip_time_ms);
- }
- std::vector<uint16_t> empty_list;
- return empty_list;
-}
-
-int AudioCodingModuleImpl::LeastRequiredDelayMs() const {
- return std::max(neteq_.LeastRequiredDelayMs(), initial_delay_ms_);
-}
-
-int AudioCodingModuleImpl::EnableNack(size_t max_nack_list_size) {
- // Don't do anything if |max_nack_list_size| is out of range.
- if (max_nack_list_size == 0 ||
- max_nack_list_size > acm2::Nack::kNackListSizeLimit)
- return -1;
-
- CriticalSectionScoped lock(acm_crit_sect_);
- if (!nack_enabled_) {
- nack_.reset(acm2::Nack::Create(kNackThresholdPackets));
- nack_enabled_ = true;
-
- // Sampling rate might need to be updated if we change from disable to
- // enable. Do it if the receive codec is valid.
- if (current_receive_codec_idx_ >= 0) {
- nack_->UpdateSampleRate(
- ACMCodecDB::database_[current_receive_codec_idx_].plfreq);
- }
- }
- return nack_->SetMaxNackListSize(max_nack_list_size);
-}
-
-void AudioCodingModuleImpl::DisableNack() {
- CriticalSectionScoped lock(acm_crit_sect_);
- nack_.reset(); // Memory is released.
- nack_enabled_ = false;
-}
-
-const char* AudioCodingModuleImpl::Version() const {
- return kLegacyAcmVersion;
-}
-
-void AudioCodingModuleImpl::GetDecodingCallStatistics(
- AudioDecodingCallStats* call_stats) const {
- CriticalSectionScoped lock(acm_crit_sect_);
- *call_stats = call_stats_.GetDecodingStatistics();
-}
-
-} // namespace acm1
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h b/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h
deleted file mode 100644
index f0b22f11465..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h
+++ /dev/null
@@ -1,455 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
-
-#include <vector>
-
-#include "webrtc/common_types.h"
-#include "webrtc/engine_configurations.h"
-#include "webrtc/modules/audio_coding/main/interface/audio_coding_module.h"
-#include "webrtc/modules/audio_coding/main/source/acm_codec_database.h"
-#include "webrtc/modules/audio_coding/main/source/acm_neteq.h"
-#include "webrtc/modules/audio_coding/main/source/acm_resampler.h"
-#include "webrtc/modules/audio_coding/main/acm2/call_statistics.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-
-namespace webrtc {
-
-struct WebRtcACMAudioBuff;
-struct WebRtcACMCodecParams;
-class CriticalSectionWrapper;
-class RWLockWrapper;
-class Clock;
-
-namespace acm2 {
-class Nack;
-}
-
-namespace acm1 {
-
-class ACMDTMFDetection;
-class ACMGenericCodec;
-
-class AudioCodingModuleImpl : public AudioCodingModule {
- public:
- AudioCodingModuleImpl(const int32_t id, Clock* clock);
- ~AudioCodingModuleImpl();
-
- virtual const char* Version() const;
-
- // Change the unique identifier of this object.
- virtual int32_t ChangeUniqueId(const int32_t id);
-
- // Returns the number of milliseconds until the module want a worker thread
- // to call Process.
- int32_t TimeUntilNextProcess();
-
- // Process any pending tasks such as timeouts.
- int32_t Process();
-
- /////////////////////////////////////////
- // Sender
- //
-
- // Initialize send codec.
- int32_t InitializeSender();
-
- // Reset send codec.
- int32_t ResetEncoder();
-
- // Can be called multiple times for Codec, CNG, RED.
- int32_t RegisterSendCodec(const CodecInst& send_codec);
-
- // Register Secondary codec for dual-streaming. Dual-streaming is activated
- // right after the secondary codec is registered.
- int RegisterSecondarySendCodec(const CodecInst& send_codec);
-
- // Unregister the secondary codec. Dual-streaming is deactivated right after
- // deregistering secondary codec.
- void UnregisterSecondarySendCodec();
-
- // Get the secondary codec.
- int SecondarySendCodec(CodecInst* secondary_codec) const;
-
- // Get current send codec.
- int32_t SendCodec(CodecInst* current_codec) const;
-
- // Get current send frequency.
- int32_t SendFrequency() const;
-
- // Get encode bit-rate.
- // Adaptive rate codecs return their current encode target rate, while other
- // codecs return there long-term average or their fixed rate.
- int32_t SendBitrate() const;
-
- // Set available bandwidth, inform the encoder about the
- // estimated bandwidth received from the remote party.
- virtual int32_t SetReceivedEstimatedBandwidth(const int32_t bw);
-
- // Register a transport callback which will be
- // called to deliver the encoded buffers.
- int32_t RegisterTransportCallback(AudioPacketizationCallback* transport);
-
- // Add 10 ms of raw (PCM) audio data to the encoder.
- int32_t Add10MsData(const AudioFrame& audio_frame);
-
- /////////////////////////////////////////
- // (FEC) Forward Error Correction
- //
-
- // Configure FEC status i.e on/off.
- int32_t SetFECStatus(const bool enable_fec);
-
- // Get FEC status.
- bool FECStatus() const;
-
- /////////////////////////////////////////
- // (VAD) Voice Activity Detection
- // and
- // (CNG) Comfort Noise Generation
- //
-
- int32_t SetVAD(bool enable_dtx = true,
- bool enable_vad = false,
- ACMVADMode mode = VADNormal);
-
- int32_t VAD(bool* dtx_enabled, bool* vad_enabled, ACMVADMode* mode) const;
-
- int32_t RegisterVADCallback(ACMVADCallback* vad_callback);
-
- /////////////////////////////////////////
- // Receiver
- //
-
- // Initialize receiver, resets codec database etc.
- int32_t InitializeReceiver();
-
- // Reset the decoder state.
- int32_t ResetDecoder();
-
- // Get current receive frequency.
- int32_t ReceiveFrequency() const;
-
- // Get current playout frequency.
- int32_t PlayoutFrequency() const;
-
- // Register possible receive codecs, can be called multiple times,
- // for codecs, CNG, DTMF, RED.
- int32_t RegisterReceiveCodec(const CodecInst& receive_codec);
-
- // Get current received codec.
- int32_t ReceiveCodec(CodecInst* current_codec) const;
-
- // Incoming packet from network parsed and ready for decode.
- int32_t IncomingPacket(const uint8_t* incoming_payload,
- const int32_t payload_length,
- const WebRtcRTPHeader& rtp_info);
-
- // Incoming payloads, without rtp-info, the rtp-info will be created in ACM.
- // One usage for this API is when pre-encoded files are pushed in ACM.
- int32_t IncomingPayload(const uint8_t* incoming_payload,
- const int32_t payload_length,
- const uint8_t payload_type,
- const uint32_t timestamp = 0);
-
- // NetEq minimum playout delay (used for lip-sync). The actual target delay
- // is the max of |time_ms| and the required delay dictated by the channel.
- int SetMinimumPlayoutDelay(int time_ms);
-
- // NetEq maximum playout delay. The actual target delay is the min of
- // |time_ms| and the required delay dictated by the channel.
- int SetMaximumPlayoutDelay(int time_ms);
-
- // The shortest latency, in milliseconds, required by jitter buffer. This
- // is computed based on inter-arrival times and playout mode of NetEq. The
- // actual delay is the maximum of least-required-delay and the minimum-delay
- // specified by SetMinumumPlayoutDelay() API.
- //
- int LeastRequiredDelayMs() const ;
-
- // Configure Dtmf playout status i.e on/off playout the incoming outband Dtmf
- // tone.
- int32_t SetDtmfPlayoutStatus(const bool enable);
-
- // Get Dtmf playout status.
- bool DtmfPlayoutStatus() const;
-
- // Estimate the Bandwidth based on the incoming stream, needed
- // for one way audio where the RTCP send the BW estimate.
- // This is also done in the RTP module .
- int32_t DecoderEstimatedBandwidth() const;
-
- // Set playout mode voice, fax.
- int32_t SetPlayoutMode(const AudioPlayoutMode mode);
-
- // Get playout mode voice, fax.
- AudioPlayoutMode PlayoutMode() const;
-
- // Get playout timestamp.
- int32_t PlayoutTimestamp(uint32_t* timestamp);
-
- // Get 10 milliseconds of raw audio data to play out, and
- // automatic resample to the requested frequency if > 0.
- int32_t PlayoutData10Ms(int32_t desired_freq_hz,
- AudioFrame* audio_frame);
-
- /////////////////////////////////////////
- // Statistics
- //
-
- int32_t NetworkStatistics(ACMNetworkStatistics* statistics);
-
- void DestructEncoderInst(void* inst);
-
- int16_t AudioBuffer(WebRtcACMAudioBuff& buffer);
-
- // GET RED payload for iSAC. The method id called when 'this' ACM is
- // the default ACM.
- int32_t REDPayloadISAC(const int32_t isac_rate,
- const int16_t isac_bw_estimate,
- uint8_t* payload,
- int16_t* length_bytes);
-
- int16_t SetAudioBuffer(WebRtcACMAudioBuff& buffer);
-
- uint32_t EarliestTimestamp() const;
-
- int32_t LastEncodedTimestamp(uint32_t& timestamp) const;
-
- int32_t ReplaceInternalDTXWithWebRtc(const bool use_webrtc_dtx);
-
- int32_t IsInternalDTXReplacedWithWebRtc(bool* uses_webrtc_dtx);
-
- int SetISACMaxRate(int max_bit_per_sec);
-
- int SetISACMaxPayloadSize(int max_size_bytes);
-
- int32_t ConfigISACBandwidthEstimator(
- int frame_size_ms,
- int rate_bit_per_sec,
- bool enforce_frame_size = false);
-
- int UnregisterReceiveCodec(uint8_t payload_type);
-
- std::vector<uint16_t> GetNackList(int round_trip_time_ms) const;
-
- protected:
- void UnregisterSendCodec();
-
- int32_t UnregisterReceiveCodecSafe(const int16_t id);
-
- ACMGenericCodec* CreateCodec(const CodecInst& codec);
-
- int16_t DecoderParamByPlType(const uint8_t payload_type,
- WebRtcACMCodecParams& codec_params) const;
-
- int16_t DecoderListIDByPlName(
- const char* name, const uint16_t frequency = 0) const;
-
- int32_t InitializeReceiverSafe();
-
- bool HaveValidEncoder(const char* caller_name) const;
-
- int32_t RegisterRecCodecMSSafe(const CodecInst& receive_codec,
- int16_t codec_id,
- int16_t mirror_id,
- ACMNetEQ::JitterBuffer jitter_buffer);
-
- // Set VAD/DTX status. This function does not acquire a lock, and it is
- // created to be called only from inside a critical section.
- int SetVADSafe(bool enable_dtx, bool enable_vad, ACMVADMode mode);
-
- // Process buffered audio when dual-streaming is not enabled (When RED is
- // enabled still this function is used.)
- int ProcessSingleStream();
-
- // Process buffered audio when dual-streaming is enabled, i.e. secondary send
- // codec is registered.
- int ProcessDualStream();
-
- // Preprocessing of input audio, including resampling and down-mixing if
- // required, before pushing audio into encoder's buffer.
- //
- // in_frame: input audio-frame
- // ptr_out: pointer to output audio_frame. If no preprocessing is required
- // |ptr_out| will be pointing to |in_frame|, otherwise pointing to
- // |preprocess_frame_|.
- //
- // Return value:
- // -1: if encountering an error.
- // 0: otherwise.
- int PreprocessToAddData(const AudioFrame& in_frame,
- const AudioFrame** ptr_out);
-
- // Set initial playout delay.
- // -delay_ms: delay in millisecond.
- //
- // Return value:
- // -1: if cannot set the delay.
- // 0: if delay set successfully.
- int SetInitialPlayoutDelay(int delay_ms);
-
- // Enable NACK and set the maximum size of the NACK list.
- int EnableNack(size_t max_nack_list_size);
-
- // Disable NACK.
- void DisableNack();
-
- void GetDecodingCallStatistics(AudioDecodingCallStats* call_stats) const;
-
- private:
- // Change required states after starting to receive the codec corresponding
- // to |index|.
- int UpdateUponReceivingCodec(int index);
-
- // Remove all slaves and initialize a stereo slave with required codecs
- // from the master.
- int InitStereoSlave();
-
- // Returns true if the codec's |index| is registered with the master and
- // is a stereo codec, RED or CN.
- bool IsCodecForSlave(int index) const;
-
- int EncodeFragmentation(int fragmentation_index, int payload_type,
- uint32_t current_timestamp,
- ACMGenericCodec* encoder,
- uint8_t* stream);
-
- void ResetFragmentation(int vector_size);
-
- bool GetSilence(int desired_sample_rate_hz, AudioFrame* frame);
-
- // Push a synchronization packet into NetEq. Such packets result in a frame
- // of zeros (not decoded by the corresponding decoder). The size of the frame
- // is the same as last decoding. NetEq has a special payload for this.
- // Call within the scope of ACM critical section.
- int PushSyncPacketSafe();
-
- // Update the parameters required in initial phase of buffering, when
- // initial playout delay is requested. Call within the scope of ACM critical
- // section.
- void UpdateBufferingSafe(const WebRtcRTPHeader& rtp_info,
- int payload_len_bytes);
-
- //
- // Return the timestamp of current time, computed according to sampling rate
- // of the codec identified by |codec_id|.
- //
- uint32_t NowTimestamp(int codec_id);
-
- AudioPacketizationCallback* packetization_callback_;
- int32_t id_;
- uint32_t last_timestamp_;
- uint32_t last_in_timestamp_;
- CodecInst send_codec_inst_;
- uint8_t cng_nb_pltype_;
- uint8_t cng_wb_pltype_;
- uint8_t cng_swb_pltype_;
- uint8_t cng_fb_pltype_;
- uint8_t red_pltype_;
- bool vad_enabled_;
- bool dtx_enabled_;
- ACMVADMode vad_mode_;
- ACMGenericCodec* codecs_[ACMCodecDB::kMaxNumCodecs];
- ACMGenericCodec* slave_codecs_[ACMCodecDB::kMaxNumCodecs];
- int16_t mirror_codec_idx_[ACMCodecDB::kMaxNumCodecs];
- bool stereo_receive_[ACMCodecDB::kMaxNumCodecs];
- bool stereo_receive_registered_;
- bool stereo_send_;
- int prev_received_channel_;
- int expected_channels_;
- int32_t current_send_codec_idx_;
- int current_receive_codec_idx_;
- bool send_codec_registered_;
- ACMResampler input_resampler_;
- ACMResampler output_resampler_;
- ACMNetEQ neteq_;
- CriticalSectionWrapper* acm_crit_sect_;
- ACMVADCallback* vad_callback_;
- uint8_t last_recv_audio_codec_pltype_;
-
- // RED/FEC.
- bool is_first_red_;
- bool fec_enabled_;
- // TODO(turajs): |red_buffer_| is allocated in constructor, why having them
- // as pointers and not an array. If concerned about the memory, then make a
- // set-up function to allocate them only when they are going to be used, i.e.
- // FEC or Dual-streaming is enabled.
- uint8_t* red_buffer_;
- // TODO(turajs): we actually don't need |fragmentation_| as a member variable.
- // It is sufficient to keep the length & payload type of previous payload in
- // member variables.
- RTPFragmentationHeader fragmentation_;
- uint32_t last_fec_timestamp_;
- // If no RED is registered as receive codec this
- // will have an invalid value.
- uint8_t receive_red_pltype_;
-
- // This is to keep track of CN instances where we can send DTMFs.
- uint8_t previous_pltype_;
-
- // This keeps track of payload types associated with codecs_[].
- // We define it as signed variable and initialize with -1 to indicate
- // unused elements.
- int16_t registered_pltypes_[ACMCodecDB::kMaxNumCodecs];
-
- // Used when payloads are pushed into ACM without any RTP info
- // One example is when pre-encoded bit-stream is pushed from
- // a file.
- WebRtcRTPHeader* dummy_rtp_header_;
- uint16_t recv_pl_frame_size_smpls_;
-
- bool receiver_initialized_;
- ACMDTMFDetection* dtmf_detector_;
-
- AudioCodingFeedback* dtmf_callback_;
- int16_t last_detected_tone_;
- CriticalSectionWrapper* callback_crit_sect_;
-
- AudioFrame audio_frame_;
- AudioFrame preprocess_frame_;
- CodecInst secondary_send_codec_inst_;
- scoped_ptr<ACMGenericCodec> secondary_encoder_;
-
- // Initial delay.
- int initial_delay_ms_;
- int num_packets_accumulated_;
- int num_bytes_accumulated_;
- int accumulated_audio_ms_;
- int first_payload_received_;
- uint32_t last_incoming_send_timestamp_;
- bool track_neteq_buffer_;
- uint32_t playout_ts_;
-
- // AV-sync is enabled. In AV-sync mode, sync packet pushed during long packet
- // losses.
- bool av_sync_;
-
- // Latest send timestamp difference of two consecutive packets.
- uint32_t last_timestamp_diff_;
- uint16_t last_sequence_number_;
- uint32_t last_ssrc_;
- bool last_packet_was_sync_;
- int64_t last_receive_timestamp_;
-
- Clock* clock_;
- scoped_ptr<acm2::Nack> nack_;
- bool nack_enabled_;
-
- acm2::CallStatistics call_stats_;
-};
-
-} // namespace acm1
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/Android.mk b/chromium/third_party/webrtc/modules/audio_coding/neteq/Android.mk
deleted file mode 100644
index 84267becf2c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/Android.mk
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-
-include $(LOCAL_PATH)/../../../../android-webrtc.mk
-
-LOCAL_ARM_MODE := arm
-LOCAL_MODULE_CLASS := STATIC_LIBRARIES
-LOCAL_MODULE := libwebrtc_neteq
-LOCAL_MODULE_TAGS := optional
-LOCAL_SRC_FILES := \
- accelerate.c \
- automode.c \
- bgn_update.c \
- bufstats_decision.c \
- cng_internal.c \
- codec_db.c \
- correlator.c \
- dsp.c \
- dsp_helpfunctions.c \
- dtmf_buffer.c \
- dtmf_tonegen.c \
- expand.c \
- mcu_address_init.c \
- mcu_dsp_common.c \
- mcu_reset.c \
- merge.c \
- min_distortion.c \
- mix_voice_unvoice.c \
- mute_signal.c \
- normal.c \
- packet_buffer.c \
- peak_detection.c \
- preemptive_expand.c \
- random_vector.c \
- recin.c \
- recout.c \
- rtcp.c \
- rtp.c \
- set_fs.c \
- signal_mcu.c \
- split_and_insert.c \
- unmute_signal.c \
- webrtc_neteq.c
-
-# Flags passed to both C and C++ files.
-LOCAL_CFLAGS := \
- $(MY_WEBRTC_COMMON_DEFS) \
- '-DNETEQ_VOICEENGINE_CODECS'
-
-LOCAL_C_INCLUDES := \
- $(LOCAL_PATH)/interface \
- $(LOCAL_PATH)/../codecs/cng/include \
- $(LOCAL_PATH)/../../.. \
- $(LOCAL_PATH)/../../../common_audio/signal_processing/include
-
-LOCAL_SHARED_LIBRARIES := \
- libcutils \
- libdl \
- libstlport
-
-ifndef NDK_ROOT
-include external/stlport/libstlport.mk
-endif
-include $(BUILD_STATIC_LIBRARY)
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/neteq/OWNERS
index b5c79cef499..072e754998f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/OWNERS
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/OWNERS
@@ -1,3 +1,11 @@
henrik.lundin@webrtc.org
tina.legrand@webrtc.org
turaj@webrtc.org
+minyue@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.c
deleted file mode 100644
index a345a8fdcd2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.c
+++ /dev/null
@@ -1,493 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the Accelerate algorithm that is used to reduce
- * the delay by removing a part of the audio stream.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-
-#define ACCELERATE_CORR_LEN 50
-#define ACCELERATE_MIN_LAG 10
-#define ACCELERATE_MAX_LAG 60
-#define ACCELERATE_DOWNSAMPLED_LEN (ACCELERATE_CORR_LEN + ACCELERATE_MAX_LAG)
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_downSampSpeech 110 0 109
- int32_t pw32_corr 2*50 110 209
- int16_t pw16_corr 50 0 49
-
- Total: 110+2*50
- */
-
-#define SCRATCH_PW16_DS_SPEECH 0
-#define SCRATCH_PW32_CORR ACCELERATE_DOWNSAMPLED_LEN
-#define SCRATCH_PW16_CORR 0
-
-/****************************************************************************
- * WebRtcNetEQ_Accelerate(...)
- *
- * This function tries to shorten the audio data by removing one or several
- * pitch periods. The operation is only carried out if the correlation is
- * strong or if the signal energy is very low.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to newly decoded speech.
- * - len : Length of decoded speech.
- * - BGNonly : If non-zero, Accelerate will only remove the last
- * DEFAULT_TIME_ADJUST seconds of the input.
- * No signal matching is done.
- *
- * Output:
- * - inst : Updated instance
- * - outData : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Number of samples written to outData.
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- const int16_t *pw16_decoded, int len,
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly)
-{
-
-#ifdef SCRATCH
- /* Use scratch memory for internal temporary vectors */
- int16_t *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
- int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
- int16_t *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
-#else
- /* Allocate memory for temporary vectors */
- int16_t pw16_downSampSpeech[ACCELERATE_DOWNSAMPLED_LEN];
- int32_t pw32_corr[ACCELERATE_CORR_LEN];
- int16_t pw16_corr[ACCELERATE_CORR_LEN];
-#endif
- int16_t w16_decodedMax = 0;
- int16_t w16_tmp;
- int16_t w16_tmp2;
- int32_t w32_tmp;
- int32_t w32_tmp2;
-
- const int16_t w16_startLag = ACCELERATE_MIN_LAG;
- const int16_t w16_endLag = ACCELERATE_MAX_LAG;
- const int16_t w16_corrLen = ACCELERATE_CORR_LEN;
- const int16_t *pw16_vec1, *pw16_vec2;
- int16_t *pw16_vectmp;
- int16_t w16_inc, w16_startfact;
- int16_t w16_bestIndex, w16_bestVal;
- int16_t w16_VAD = 1;
- int16_t fsMult;
- int16_t fsMult120;
- int32_t w32_en1, w32_en2, w32_cc;
- int16_t w16_en1, w16_en2;
- int16_t w16_en1Scale, w16_en2Scale;
- int16_t w16_sqrtEn1En2;
- int16_t w16_bestCorr = 0;
- int ok;
-
-#ifdef NETEQ_STEREO
- MasterSlaveInfo *msInfo = inst->msInfo;
-#endif
-
- fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */
-
- /* Pre-calculate common multiplication with fsMult */
- fsMult120 = (int16_t) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
-
- inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
-
- /* Sanity check for len variable; must be (almost) 30 ms
- (120*fsMult + max(bestIndex)) */
- if (len < (int16_t) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult))
- {
- /* Length of decoded data too short */
- inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
- *pw16_len = len;
-
- /* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return NETEQ_OTHER_ERROR;
- }
-
- /***********************************/
- /* Special operations for BGN only */
- /***********************************/
-
- /* Check if "background noise only" flag is set */
- if (BGNonly)
- {
- /* special operation for BGN only; simply remove a chunk of data */
- w16_bestIndex = DEFAULT_TIME_ADJUST * WEBRTC_SPL_LSHIFT_W16(fsMult, 3); /* X*fs/1000 */
-
- /* Sanity check for bestIndex */
- if (w16_bestIndex > len)
- { /* not good, do nothing instead */
- inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
- *pw16_len = len;
-
- /* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return NETEQ_OTHER_ERROR;
- }
-
- /* set length parameter */
- *pw16_len = len - w16_bestIndex; /* we remove bestIndex samples */
-
- /* copy to output */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, *pw16_len);
-
- /* set mode */
- inst->w16_mode = MODE_LOWEN_ACCELERATE;
-
- /* update statistics */
- inst->statInst.accelerateLength += w16_bestIndex;
- /* Short-term activity statistics. */
- inst->activity_stats.accelerate_bgn_samples += w16_bestIndex;
-
- return 0;
- } /* end of special code for BGN mode */
-
-#ifdef NETEQ_STEREO
-
- /* Sanity for msInfo */
- if (msInfo == NULL)
- {
- /* this should not happen here */
- return MASTER_SLAVE_ERROR;
- }
-
- if (msInfo->msMode != NETEQ_SLAVE)
- {
- /* Find correlation lag only for non-slave instances */
-
-#endif
-
- /****************************************************************/
- /* Find the strongest correlation lag by downsampling to 4 kHz, */
- /* calculating correlation for downsampled signal and finding */
- /* the strongest correlation peak. */
- /****************************************************************/
-
- /* find maximum absolute value */
- w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
-
- /* downsample the decoded speech to 4 kHz */
- ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
- ACCELERATE_DOWNSAMPLED_LEN, 1 /* compensate delay*/);
- if (ok != 0)
- {
- /* error */
- inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
- *pw16_len = len;
- /* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
- return NETEQ_OTHER_ERROR;
- }
-
- /*
- * Set scaling factor for cross correlation to protect against overflow
- * (log2(50) => 6)
- */
- w16_tmp = 6 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- /* Perform correlation from lag 10 to lag 60 in 4 kHz domain */
- WebRtcNetEQ_CrossCorr(
- pw32_corr, &pw16_downSampSpeech[w16_endLag],
- &pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
- (int16_t) (w16_endLag - w16_startLag), w16_tmp, -1);
-
- /* Normalize correlation to 14 bits and put in a int16_t vector */
- w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
- w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_corrLen, pw32_corr, w16_tmp);
-
-#ifdef NETEQ_STEREO
- } /* end if (msInfo->msMode != NETEQ_SLAVE) */
-
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
- /* Find the strongest correlation peak by using the parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corr, (int16_t) w16_corrLen, 1, fsMult,
- &w16_bestIndex, &w16_bestVal);
- /* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */
-
- /* Compensate bestIndex for displaced starting position */
- w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
- /* 20*fsMult <= bestIndex <= 119*fsMult */
-
- msInfo->bestIndex = w16_bestIndex;
- }
- else if (msInfo->msMode == NETEQ_SLAVE)
- {
- if (msInfo->extraInfo == ACC_FAIL)
- {
- /* Master has signaled an unsuccessful accelerate */
- w16_bestIndex = 0;
- }
- else
- {
- /* Get best index from master */
- w16_bestIndex = msInfo->bestIndex;
- }
- }
- else
- {
- /* Invalid mode */
- return MASTER_SLAVE_ERROR;
- }
-
-#else /* NETEQ_STEREO */
-
- /* Find the strongest correlation peak by using the parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corr, (int16_t) w16_corrLen, 1, fsMult,
- &w16_bestIndex, &w16_bestVal);
- /* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */
-
- /* Compensate bestIndex for displaced starting position */
- w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
- /* 20*fsMult <= bestIndex <= 119*fsMult */
-
-#endif /* NETEQ_STEREO */
-
-#ifdef NETEQ_STEREO
-
- if (msInfo->msMode != NETEQ_SLAVE)
- {
- /* Calculate correlation only for non-slave instances */
-
-#endif /* NETEQ_STEREO */
-
- /*****************************************************/
- /* Calculate correlation bestCorr for the found lag. */
- /* Also do a simple VAD decision. */
- /*****************************************************/
-
- /*
- * Calculate scaling to ensure that bestIndex samples can be square-summed
- * without overflowing
- */
- w16_tmp = (31
- - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax)));
- w16_tmp += (31 - WebRtcSpl_NormW32(w16_bestIndex));
- w16_tmp -= 31;
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- /* vec1 starts at 15 ms minus one pitch period */
- pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
- /* vec2 start at 15 ms */
- pw16_vec2 = &pw16_decoded[fsMult120];
-
- /* Calculate energies for vec1 and vec2 */
- w32_en1 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1,
- (int16_t*) pw16_vec1, w16_bestIndex, w16_tmp);
- w32_en2 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec2,
- (int16_t*) pw16_vec2, w16_bestIndex, w16_tmp);
-
- /* Calculate cross-correlation at the found lag */
- w32_cc = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1, (int16_t*) pw16_vec2,
- w16_bestIndex, w16_tmp);
-
- /* Check VAD constraint
- ((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
- w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_en1 + w32_en2, 4); /* (en1+en2)/(2*8) */
- if (inst->BGNInst.w16_initialized == 1)
- {
- w32_tmp2 = inst->BGNInst.w32_energy;
- }
- else
- {
- /* if BGN parameters have not been estimated, use a fixed threshold */
- w32_tmp2 = 75000;
- }
- w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
- w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
- w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
- w16_tmp2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
- w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);
-
- /* Scale w32_tmp properly before comparing with w32_tmp2 */
- /* (w16_tmp is scaling before energy calculation, thus 2*w16_tmp) */
- if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
- {
- /* Cannot scale only w32_tmp, must scale w32_temp2 too */
- int16_t tempshift = WebRtcSpl_NormW32(w32_tmp);
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
- w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
- WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
- }
- else
- {
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp,
- WEBRTC_SPL_LSHIFT_W32(w16_tmp,1));
- }
-
- if (w32_tmp <= w32_tmp2) /*((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
- {
- /* The signal seems to be passive speech */
- w16_VAD = 0;
- w16_bestCorr = 0; /* Correlation does not matter */
- }
- else
- {
- /* The signal is active speech */
- w16_VAD = 1;
-
- /* Calculate correlation (cc/sqrt(en1*en2)) */
-
- /* Start with calculating scale values */
- w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
- w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
- w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
- w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
-
- /* Make sure total scaling is even (to simplify scale factor after sqrt) */
- if ((w16_en1Scale + w16_en2Scale) & 1)
- {
- w16_en1Scale += 1;
- }
-
- /* Convert energies to int16_t */
- w16_en1 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
- w16_en2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
-
- /* Calculate energy product */
- w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
-
- /* Calculate square-root of energy product */
- w16_sqrtEn1En2 = (int16_t) WebRtcSpl_SqrtFloor(w32_tmp);
-
- /* Calculate cc/sqrt(en1*en2) in Q14 */
- w16_tmp = 14 - WEBRTC_SPL_RSHIFT_W16(w16_en1Scale+w16_en2Scale, 1);
- w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
- w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
- w16_bestCorr = (int16_t) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
- w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
- }
-
-#ifdef NETEQ_STEREO
-
- } /* end if (msInfo->msMode != NETEQ_SLAVE) */
-
-#endif /* NETEQ_STEREO */
-
- /************************************************/
- /* Check accelerate criteria and remove samples */
- /************************************************/
-
- /* Check for strong correlation (>0.9) or passive speech */
-#ifdef NETEQ_STEREO
- if ((((w16_bestCorr > 14746) || (w16_VAD == 0)) && (msInfo->msMode != NETEQ_SLAVE))
- || ((msInfo->msMode == NETEQ_SLAVE) && (msInfo->extraInfo != ACC_FAIL)))
-#else
- if ((w16_bestCorr > 14746) || (w16_VAD == 0))
-#endif
- {
- /* Do accelerate operation by overlap add */
-
- /*
- * Calculate cross-fading slope so that the fading factor goes from
- * 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
- */
- w16_inc = (int16_t) WebRtcSpl_DivW32W16((int32_t) 16384,
- (int16_t) (w16_bestIndex + 1)); /* in Q14 */
-
- /* Initiate fading factor */
- w16_startfact = 16384 - w16_inc;
-
- /* vec1 starts at 15 ms minus one pitch period */
- pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
- /* vec2 start at 15 ms */
- pw16_vec2 = &pw16_decoded[fsMult120];
-
- /* Copy unmodified part [0 to 15 ms minus 1 pitch period] */
- w16_tmp = (fsMult120 - w16_bestIndex);
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, w16_tmp);
-
- /* Generate interpolated part of length bestIndex (1 pitch period) */
- pw16_vectmp = pw16_outData + w16_tmp; /* start of interpolation output */
- /* Reuse mixing function from Expand */
- WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (int16_t*) pw16_vec1,
- (int16_t*) pw16_vec2, &w16_startfact, w16_inc, w16_bestIndex);
-
- /* Move the last part (also unmodified) */
- /* Take from decoded at 15 ms + 1 pitch period */
- pw16_vec2 = &pw16_decoded[fsMult120 + w16_bestIndex];
- WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[fsMult120], pw16_vec2,
- (int16_t) (len - fsMult120 - w16_bestIndex));
-
- /* Set the mode flag */
- if (w16_VAD)
- {
- inst->w16_mode = MODE_SUCCESS_ACCELERATE;
- }
- else
- {
- inst->w16_mode = MODE_LOWEN_ACCELERATE;
- }
-
- /* Calculate resulting length = original length - pitch period */
- *pw16_len = len - w16_bestIndex;
-
- /* Update in-call statistics */
- inst->statInst.accelerateLength += w16_bestIndex;
- /* Short-term activity statistics. */
- inst->activity_stats.accelarate_normal_samples += w16_bestIndex;
-
- return 0;
- }
- else
- {
- /* Accelerate not allowed */
-
-#ifdef NETEQ_STEREO
- /* Signal to slave(s) that this was unsuccessful */
- if (msInfo->msMode == NETEQ_MASTER)
- {
- msInfo->extraInfo = ACC_FAIL;
- }
-#endif
-
- /* Set mode flag to unsuccessful accelerate */
- inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
-
- /* Length is unmodified */
- *pw16_len = len;
-
- /* Simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return 0;
- }
-}
-
-#undef SCRATCH_PW16_DS_SPEECH
-#undef SCRATCH_PW32_CORR
-#undef SCRATCH_PW16_CORR
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.cc
index 88cfa4dad9b..6acd778a233 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/accelerate.h"
+#include "webrtc/modules/audio_coding/neteq/accelerate.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
@@ -78,4 +78,11 @@ Accelerate::ReturnCodes Accelerate::CheckCriteriaAndStretch(
}
}
+Accelerate* AccelerateFactory::Create(
+ int sample_rate_hz,
+ size_t num_channels,
+ const BackgroundNoise& background_noise) const {
+ return new Accelerate(sample_rate_hz, num_channels, background_noise);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.h
index 83e3e384543..2da999326a3 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/accelerate.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/accelerate.h
@@ -8,14 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_ACCELERATE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_ACCELERATE_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_ACCELERATE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_ACCELERATE_H_
#include <assert.h>
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/time_stretch.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/time_stretch.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -64,5 +64,14 @@ class Accelerate : public TimeStretch {
DISALLOW_COPY_AND_ASSIGN(Accelerate);
};
+struct AccelerateFactory {
+ AccelerateFactory() {}
+ virtual ~AccelerateFactory() {}
+
+ virtual Accelerate* Create(int sample_rate_hz,
+ size_t num_channels,
+ const BackgroundNoise& background_noise) const;
+};
+
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_ACCELERATE_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_ACCELERATE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.cc
new file mode 100644
index 00000000000..cc4bc97c30c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.cc
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/audio_classifier.h"
+
+#include <assert.h>
+#include <string.h>
+
+namespace webrtc {
+
+static const int kDefaultSampleRateHz = 48000;
+static const int kDefaultFrameRateHz = 50;
+static const int kDefaultFrameSizeSamples =
+ kDefaultSampleRateHz / kDefaultFrameRateHz;
+static const float kDefaultThreshold = 0.5f;
+
+AudioClassifier::AudioClassifier()
+ : analysis_info_(),
+ is_music_(false),
+ music_probability_(0),
+ // This actually assigns the pointer to a static constant struct
+ // rather than creates a struct and |celt_mode_| does not need
+ // to be deleted.
+ celt_mode_(opus_custom_mode_create(kDefaultSampleRateHz,
+ kDefaultFrameSizeSamples,
+ NULL)),
+ analysis_state_() {
+ assert(celt_mode_);
+}
+
+AudioClassifier::~AudioClassifier() {}
+
+bool AudioClassifier::Analysis(const int16_t* input,
+ int input_length,
+ int channels) {
+ // Must be 20 ms frames at 48 kHz sampling.
+ assert((input_length / channels) == kDefaultFrameSizeSamples);
+
+ // Only mono or stereo are allowed.
+ assert(channels == 1 || channels == 2);
+
+ // Call Opus' classifier, defined in
+ // "third_party/opus/src/src/analysis.h", with lsb_depth = 16.
+ // Also uses a down-mixing function downmix_int, defined in
+ // "third_party/opus/src/src/opus_private.h", with
+ // constants c1 = 0, and c2 = -2.
+ run_analysis(&analysis_state_,
+ celt_mode_,
+ input,
+ kDefaultFrameSizeSamples,
+ kDefaultFrameSizeSamples,
+ 0,
+ -2,
+ channels,
+ kDefaultSampleRateHz,
+ 16,
+ downmix_int,
+ &analysis_info_);
+ music_probability_ = analysis_info_.music_prob;
+ is_music_ = music_probability_ > kDefaultThreshold;
+ return is_music_;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.h
new file mode 100644
index 00000000000..e7b7807dbed
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_CLASSIFIER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_CLASSIFIER_H_
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+#include "celt.h"
+#include "analysis.h"
+#include "opus_private.h"
+#if defined(__cplusplus)
+}
+#endif
+
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// This class provides a speech/music classification and is a wrapper over the
+// Opus classifier. It currently only supports 48 kHz mono or stereo with a
+// frame size of 20 ms.
+
+class AudioClassifier {
+ public:
+ AudioClassifier();
+ virtual ~AudioClassifier();
+
+ // Classifies one frame of audio data in input,
+ // input_length : must be channels * 960;
+ // channels : must be 1 (mono) or 2 (stereo).
+ bool Analysis(const int16_t* input, int input_length, int channels);
+
+ // Gets the current classification : true = music, false = speech.
+ virtual bool is_music() const { return is_music_; }
+
+ // Gets the current music probability.
+ float music_probability() const { return music_probability_; }
+
+ private:
+ AnalysisInfo analysis_info_;
+ bool is_music_;
+ float music_probability_;
+ const CELTMode* celt_mode_;
+ TonalityAnalysisState analysis_state_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_CLASSIFIER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier_unittest.cc
new file mode 100644
index 00000000000..cf623ca0884
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_classifier_unittest.cc
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/audio_classifier.h"
+
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <string>
+
+#include "gtest/gtest.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+namespace webrtc {
+
+static const size_t kFrameSize = 960;
+
+TEST(AudioClassifierTest, AllZeroInput) {
+ int16_t in_mono[kFrameSize] = {0};
+
+ // Test all-zero vectors and let the classifier converge from its default
+ // to the expected value.
+ AudioClassifier zero_classifier;
+ for (int i = 0; i < 100; ++i) {
+ zero_classifier.Analysis(in_mono, kFrameSize, 1);
+ }
+ EXPECT_TRUE(zero_classifier.is_music());
+}
+
+void RunAnalysisTest(const std::string& audio_filename,
+ const std::string& data_filename,
+ size_t channels) {
+ AudioClassifier classifier;
+ scoped_ptr<int16_t[]> in(new int16_t[channels * kFrameSize]);
+ bool is_music_ref;
+
+ FILE* audio_file = fopen(audio_filename.c_str(), "rb");
+ ASSERT_TRUE(audio_file != NULL) << "Failed to open file " << audio_filename
+ << std::endl;
+ FILE* data_file = fopen(data_filename.c_str(), "rb");
+ ASSERT_TRUE(audio_file != NULL) << "Failed to open file " << audio_filename
+ << std::endl;
+ while (fread(in.get(), sizeof(int16_t), channels * kFrameSize, audio_file) ==
+ channels * kFrameSize) {
+ bool is_music =
+ classifier.Analysis(in.get(), channels * kFrameSize, channels);
+ EXPECT_EQ(is_music, classifier.is_music());
+ ASSERT_EQ(1u, fread(&is_music_ref, sizeof(is_music_ref), 1, data_file));
+ EXPECT_EQ(is_music_ref, is_music);
+ }
+ fclose(audio_file);
+ fclose(data_file);
+}
+
+TEST(AudioClassifierTest, DoAnalysisMono) {
+ RunAnalysisTest(test::ResourcePath("short_mixed_mono_48", "pcm"),
+ test::ResourcePath("short_mixed_mono_48", "dat"),
+ 1);
+}
+
+TEST(AudioClassifierTest, DoAnalysisStereo) {
+ RunAnalysisTest(test::ResourcePath("short_mixed_stereo_48", "pcm"),
+ test::ResourcePath("short_mixed_stereo_48", "dat"),
+ 2);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder.cc
index 35422e3f9f5..f539bb2e1ed 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder.cc
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
#include <assert.h>
-#include "webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h"
+#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
namespace webrtc {
@@ -41,6 +41,16 @@ int AudioDecoder::PacketDuration(const uint8_t* encoded, size_t encoded_len) {
return kNotImplemented;
}
+int AudioDecoder::PacketDurationRedundant(const uint8_t* encoded,
+ size_t encoded_len) const {
+ return kNotImplemented;
+}
+
+bool AudioDecoder::PacketHasFec(const uint8_t* encoded,
+ size_t encoded_len) const {
+ return false;
+}
+
NetEqDecoder AudioDecoder::codec_type() const { return codec_type_; }
bool AudioDecoder::CodecSupported(NetEqDecoder codec_type) {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
index 5296a1bd0f9..6c7269a35fe 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h"
+#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
#include <assert.h>
#include <string.h> // memmove
@@ -458,6 +458,19 @@ int AudioDecoderOpus::Decode(const uint8_t* encoded, size_t encoded_len,
return ret;
}
+int AudioDecoderOpus::DecodeRedundant(const uint8_t* encoded,
+ size_t encoded_len, int16_t* decoded,
+ SpeechType* speech_type) {
+ int16_t temp_type = 1; // Default is speech.
+ int16_t ret = WebRtcOpus_DecodeFec(static_cast<OpusDecInst*>(state_), encoded,
+ static_cast<int16_t>(encoded_len), decoded,
+ &temp_type);
+ if (ret > 0)
+ ret *= static_cast<int16_t>(channels_); // Return total number of samples.
+ *speech_type = ConvertSpeechType(temp_type);
+ return ret;
+}
+
int AudioDecoderOpus::Init() {
return WebRtcOpus_DecoderInitNew(static_cast<OpusDecInst*>(state_));
}
@@ -467,6 +480,18 @@ int AudioDecoderOpus::PacketDuration(const uint8_t* encoded,
return WebRtcOpus_DurationEst(static_cast<OpusDecInst*>(state_),
encoded, static_cast<int>(encoded_len));
}
+
+int AudioDecoderOpus::PacketDurationRedundant(const uint8_t* encoded,
+ size_t encoded_len) const {
+ return WebRtcOpus_FecDurationEst(encoded, static_cast<int>(encoded_len));
+}
+
+bool AudioDecoderOpus::PacketHasFec(const uint8_t* encoded,
+ size_t encoded_len) const {
+ int fec;
+ fec = WebRtcOpus_PacketHasFec(encoded, static_cast<int>(encoded_len));
+ return (fec == 1);
+}
#endif
AudioDecoderCng::AudioDecoderCng(enum NetEqDecoder type)
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
index aa35db78082..265d660bd79 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_impl.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_DECODER_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_DECODER_IMPL_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_DECODER_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_DECODER_IMPL_H_
#include <assert.h>
@@ -18,8 +18,8 @@
// selection is made in the gypi file instead of in engine_configurations.h.
#include "webrtc/engine_configurations.h"
#endif
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -236,8 +236,13 @@ class AudioDecoderOpus : public AudioDecoder {
virtual ~AudioDecoderOpus();
virtual int Decode(const uint8_t* encoded, size_t encoded_len,
int16_t* decoded, SpeechType* speech_type);
+ virtual int DecodeRedundant(const uint8_t* encoded, size_t encoded_len,
+ int16_t* decoded, SpeechType* speech_type);
virtual int Init();
virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
+ virtual int PacketDurationRedundant(const uint8_t* encoded,
+ size_t encoded_len) const;
+ virtual bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const;
private:
DISALLOW_COPY_AND_ASSIGN(AudioDecoderOpus);
@@ -268,4 +273,4 @@ class AudioDecoderCng : public AudioDecoder {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_DECODER_IMPL_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_DECODER_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc
index dbd9d121f4a..f82644cbc26 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/audio_decoder_impl.h"
+#include "webrtc/modules/audio_coding/neteq/audio_decoder_impl.h"
#include <assert.h>
#include <stdlib.h>
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittests.isolate b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittests.isolate
index bb57e74b34f..bb57e74b34f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_decoder_unittests.isolate
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_decoder_unittests.isolate
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc
index baa912c860e..5a208a6972a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include <assert.h>
@@ -74,8 +74,7 @@ void AudioMultiVector::PushBackInterleaved(const int16_t* append_this,
return;
}
size_t length_per_channel = length / num_channels_;
- int16_t* temp_array =
- new int16_t[length_per_channel]; // Intermediate storage.
+ int16_t* temp_array = new int16_t[length_per_channel]; // Temporary storage.
for (size_t channel = 0; channel < num_channels_; ++channel) {
// Copy elements to |temp_array|.
// Set |source_ptr| to first element of this channel.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.h
index 2d0a7494912..908de936d54 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector.h
@@ -8,15 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_MULTI_VECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_MULTI_VECTOR_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_MULTI_VECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_MULTI_VECTOR_H_
#include <string.h> // Access to size_t.
#include <vector>
-#include "webrtc/modules/audio_coding/neteq4/audio_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_vector.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -131,4 +131,4 @@ class AudioMultiVector {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_MULTI_VECTOR_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_MULTI_VECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector_unittest.cc
index be05a8260f9..94760385264 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_multi_vector_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_multi_vector_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include <assert.h>
#include <stdlib.h>
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc
index cbd46163068..d9fb4e58c24 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/audio_vector.h"
+#include "webrtc/modules/audio_coding/neteq/audio_vector.h"
#include <assert.h>
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h
index 66bd518a806..f8aabdb8e8e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector.h
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_VECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_VECTOR_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_VECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_VECTOR_H_
#include <string.h> // Access to size_t.
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
@@ -117,4 +117,4 @@ class AudioVector {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_AUDIO_VECTOR_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_AUDIO_VECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc
index de5aac2d955..50da1fb46c4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/audio_vector_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/audio_vector_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/audio_vector.h"
+#include "webrtc/modules/audio_coding/neteq/audio_vector.h"
#include <assert.h>
#include <stdlib.h>
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.c
deleted file mode 100644
index 4dbd81ed665..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.c
+++ /dev/null
@@ -1,783 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the implementation of automatic buffer level optimization.
- */
-
-#include "automode.h"
-
-#include <assert.h>
-
-#include "signal_processing_library.h"
-
-#include "neteq_defines.h"
-
-#ifdef NETEQ_DELAY_LOGGING
-/* special code for offline delay logging */
-#include <stdio.h>
-#include "delay_logging.h"
-
-extern FILE *delay_fid2; /* file pointer to delay log file */
-#endif /* NETEQ_DELAY_LOGGING */
-
-// These two functions are copied from module_common_types.h, but adapted for C.
-int WebRtcNetEQ_IsNewerSequenceNumber(uint16_t sequence_number,
- uint16_t prev_sequence_number) {
- return sequence_number != prev_sequence_number &&
- ((uint16_t) (sequence_number - prev_sequence_number)) < 0x8000;
-}
-
-int WebRtcNetEQ_IsNewerTimestamp(uint32_t timestamp, uint32_t prev_timestamp) {
- return timestamp != prev_timestamp &&
- ((uint32_t) (timestamp - prev_timestamp)) < 0x80000000;
-}
-
-int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
- uint16_t seqNumber, uint32_t timeStamp,
- int32_t fsHz, int mdCodec, int streamingMode)
-{
- uint32_t timeIat; /* inter-arrival time */
- int i;
- int32_t tempsum = 0; /* temp summation */
- int32_t tempvar; /* temporary variable */
- int retval = 0; /* return value */
- int16_t packetLenSamp; /* packet speech length in samples */
-
- /****************/
- /* Sanity check */
- /****************/
-
- if (maxBufLen <= 1 || fsHz <= 0)
- {
- /* maxBufLen must be at least 2 and fsHz must both be strictly positive */
- return -1;
- }
-
- /****************************/
- /* Update packet statistics */
- /****************************/
-
- /* Try calculating packet length from current and previous timestamps */
- if (!WebRtcNetEQ_IsNewerTimestamp(timeStamp, inst->lastTimeStamp) ||
- !WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo))
- {
- /* Wrong timestamp or sequence order; revert to backup plan */
- packetLenSamp = inst->packetSpeechLenSamp; /* use stored value */
- }
- else
- {
- /* calculate timestamps per packet */
- packetLenSamp = (int16_t) WebRtcSpl_DivU32U16(timeStamp - inst->lastTimeStamp,
- seqNumber - inst->lastSeqNo);
- }
-
- /* Check that the packet size is positive; if not, the statistics cannot be updated. */
- if (inst->firstPacketReceived && packetLenSamp > 0)
- { /* packet size ok */
-
- /* calculate inter-arrival time in integer packets (rounding down) */
- timeIat = WebRtcSpl_DivW32W16(inst->packetIatCountSamp, packetLenSamp);
-
- /* Special operations for streaming mode */
- if (streamingMode != 0)
- {
- /*
- * Calculate IAT in Q8, including fractions of a packet (i.e., more accurate
- * than timeIat).
- */
- int16_t timeIatQ8 = (int16_t) WebRtcSpl_DivW32W16(
- WEBRTC_SPL_LSHIFT_W32(inst->packetIatCountSamp, 8), packetLenSamp);
-
- /*
- * Calculate cumulative sum iat with sequence number compensation (ideal arrival
- * times makes this sum zero).
- */
- inst->cSumIatQ8 += (timeIatQ8
- - WEBRTC_SPL_LSHIFT_W32(seqNumber - inst->lastSeqNo, 8));
-
- /* subtract drift term */
- inst->cSumIatQ8 -= CSUM_IAT_DRIFT;
-
- /* ensure not negative */
- inst->cSumIatQ8 = WEBRTC_SPL_MAX(inst->cSumIatQ8, 0);
-
- /* remember max */
- if (inst->cSumIatQ8 > inst->maxCSumIatQ8)
- {
- inst->maxCSumIatQ8 = inst->cSumIatQ8;
- inst->maxCSumUpdateTimer = 0;
- }
-
- /* too long since the last maximum was observed; decrease max value */
- if (inst->maxCSumUpdateTimer > (uint32_t) WEBRTC_SPL_MUL_32_16(fsHz,
- MAX_STREAMING_PEAK_PERIOD))
- {
- inst->maxCSumIatQ8 -= 4; /* remove 1000*4/256 = 15.6 ms/s */
- }
- } /* end of streaming mode */
-
- /* check for discontinuous packet sequence and re-ordering */
- if (WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo + 1))
- {
- /* Compensate for gap in the sequence numbers.
- * Reduce IAT with expected extra time due to lost packets, but ensure that
- * the IAT is not negative.
- */
- timeIat -= WEBRTC_SPL_MIN(timeIat,
- (uint16_t) (seqNumber - (uint16_t) (inst->lastSeqNo + 1)));
- }
- else if (!WebRtcNetEQ_IsNewerSequenceNumber(seqNumber, inst->lastSeqNo))
- {
- /* compensate for re-ordering */
- timeIat += (uint16_t) (inst->lastSeqNo + 1 - seqNumber);
- }
-
- /* saturate IAT at maximum value */
- timeIat = WEBRTC_SPL_MIN( timeIat, MAX_IAT );
-
- /* update iatProb = forgetting_factor * iatProb for all elements */
- for (i = 0; i <= MAX_IAT; i++)
- {
- int32_t tempHi, tempLo; /* Temporary variables */
-
- /*
- * Multiply iatProbFact (Q15) with iatProb (Q30) and right-shift 15 steps
- * to come back to Q30. The operation is done in two steps:
- */
-
- /*
- * 1) Multiply the high 16 bits (15 bits + sign) of iatProb. Shift iatProb
- * 16 steps right to get the high 16 bits in a int16_t prior to
- * multiplication, and left-shift with 1 afterwards to come back to
- * Q30 = (Q15 * (Q30>>16)) << 1.
- */
- tempHi = WEBRTC_SPL_MUL_16_16(inst->iatProbFact,
- (int16_t) WEBRTC_SPL_RSHIFT_W32(inst->iatProb[i], 16));
- tempHi = WEBRTC_SPL_LSHIFT_W32(tempHi, 1); /* left-shift 1 step */
-
- /*
- * 2) Isolate and multiply the low 16 bits of iatProb. Right-shift 15 steps
- * afterwards to come back to Q30 = (Q15 * Q30) >> 15.
- */
- tempLo = inst->iatProb[i] & 0x0000FFFF; /* sift out the 16 low bits */
- tempLo = WEBRTC_SPL_MUL_16_U16(inst->iatProbFact,
- (uint16_t) tempLo);
- tempLo = WEBRTC_SPL_RSHIFT_W32(tempLo, 15);
-
- /* Finally, add the high and low parts */
- inst->iatProb[i] = tempHi + tempLo;
-
- /* Sum all vector elements while we are at it... */
- tempsum += inst->iatProb[i];
- }
-
- /*
- * Increase the probability for the currently observed inter-arrival time
- * with 1 - iatProbFact. The factor is in Q15, iatProb in Q30;
- * hence, left-shift 15 steps to obtain result in Q30.
- */
- inst->iatProb[timeIat] += (32768 - inst->iatProbFact) << 15;
-
- tempsum += (32768 - inst->iatProbFact) << 15; /* add to vector sum */
-
- /*
- * Update iatProbFact (changes only during the first seconds after reset)
- * The factor converges to IAT_PROB_FACT.
- */
- inst->iatProbFact += (IAT_PROB_FACT - inst->iatProbFact + 3) >> 2;
-
- /* iatProb should sum up to 1 (in Q30). */
- tempsum -= 1 << 30; /* should be zero */
-
- /* Check if it does, correct if it doesn't. */
- if (tempsum > 0)
- {
- /* tempsum too large => decrease a few values in the beginning */
- i = 0;
- while (i <= MAX_IAT && tempsum > 0)
- {
- /* Remove iatProb[i] / 16 from iatProb, but not more than tempsum */
- tempvar = WEBRTC_SPL_MIN(tempsum, inst->iatProb[i] >> 4);
- inst->iatProb[i++] -= tempvar;
- tempsum -= tempvar;
- }
- }
- else if (tempsum < 0)
- {
- /* tempsum too small => increase a few values in the beginning */
- i = 0;
- while (i <= MAX_IAT && tempsum < 0)
- {
- /* Add iatProb[i] / 16 to iatProb, but not more than tempsum */
- tempvar = WEBRTC_SPL_MIN(-tempsum, inst->iatProb[i] >> 4);
- inst->iatProb[i++] += tempvar;
- tempsum += tempvar;
- }
- }
-
- /* Calculate optimal buffer level based on updated statistics */
- tempvar = (int32_t) WebRtcNetEQ_CalcOptimalBufLvl(inst, fsHz, mdCodec, timeIat,
- streamingMode);
- if (tempvar > 0)
- {
- int high_lim_delay;
- /* Convert the minimum delay from milliseconds to packets in Q8.
- * |fsHz| is sampling rate in Hertz, and |packetLenSamp|
- * is the number of samples per packet (according to the last
- * decoding).
- */
- int32_t minimum_delay_q8 = ((inst->minimum_delay_ms *
- (fsHz / 1000)) << 8) / packetLenSamp;
-
- int32_t maximum_delay_q8 = ((inst->maximum_delay_ms *
- (fsHz / 1000)) << 8) / packetLenSamp;
-
- inst->optBufLevel = tempvar;
-
- if (streamingMode != 0)
- {
- inst->optBufLevel = WEBRTC_SPL_MAX(inst->optBufLevel,
- inst->maxCSumIatQ8);
- }
-
- /* The required delay. */
- inst->required_delay_q8 = inst->optBufLevel;
-
- // Maintain the target delay.
- inst->optBufLevel = WEBRTC_SPL_MAX(inst->optBufLevel,
- minimum_delay_q8);
-
- if (maximum_delay_q8 > 0) {
- // Make sure that max is at least one packet length.
- maximum_delay_q8 = WEBRTC_SPL_MAX(maximum_delay_q8, (1 << 8));
- inst->optBufLevel = WEBRTC_SPL_MIN(inst->optBufLevel,
- maximum_delay_q8);
- }
- /*********/
- /* Limit */
- /*********/
-
- /* Subtract extra delay from maxBufLen */
- if (inst->extraDelayMs > 0 && inst->packetSpeechLenSamp > 0)
- {
- maxBufLen -= inst->extraDelayMs / inst->packetSpeechLenSamp * fsHz / 1000;
- maxBufLen = WEBRTC_SPL_MAX(maxBufLen, 1); // sanity: at least one packet
- }
-
- maxBufLen = WEBRTC_SPL_LSHIFT_W32(maxBufLen, 8); /* shift to Q8 */
-
- /* Enforce upper limit; 75% of maxBufLen */
- /* 1/2 + 1/4 = 75% */
- high_lim_delay = (maxBufLen >> 1) + (maxBufLen >> 2);
- inst->optBufLevel = WEBRTC_SPL_MIN(inst->optBufLevel,
- high_lim_delay);
- inst->required_delay_q8 = WEBRTC_SPL_MIN(inst->required_delay_q8,
- high_lim_delay);
- }
- else
- {
- retval = (int) tempvar;
- }
-
- } /* end if */
-
- /*******************************/
- /* Update post-call statistics */
- /*******************************/
-
- /* Calculate inter-arrival time in ms = packetIatCountSamp / (fsHz / 1000) */
- timeIat = WEBRTC_SPL_UDIV(
- WEBRTC_SPL_UMUL_32_16(inst->packetIatCountSamp, (int16_t) 1000),
- (uint32_t) fsHz);
-
- /* Increase counter corresponding to current inter-arrival time */
- if (timeIat > 2000)
- {
- inst->countIAT2000ms++;
- }
- else if (timeIat > 1000)
- {
- inst->countIAT1000ms++;
- }
- else if (timeIat > 500)
- {
- inst->countIAT500ms++;
- }
-
- if (timeIat > inst->longestIATms)
- {
- /* update maximum value */
- inst->longestIATms = timeIat;
- }
-
- /***********************************/
- /* Prepare for next packet arrival */
- /***********************************/
-
- inst->packetIatCountSamp = 0; /* reset inter-arrival time counter */
-
- inst->lastSeqNo = seqNumber; /* remember current sequence number */
-
- inst->lastTimeStamp = timeStamp; /* remember current timestamp */
-
- inst->firstPacketReceived = 1;
-
- return retval;
-}
-
-
-int16_t WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, int32_t fsHz,
- int mdCodec, uint32_t timeIatPkts,
- int streamingMode)
-{
-
- int32_t sum1 = 1 << 30; /* assign to 1 in Q30 */
- int16_t B;
- uint16_t Bopt;
- int i;
- int32_t betaInv; /* optimization parameter */
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- int temp_var;
-#endif
-
- /****************/
- /* Sanity check */
- /****************/
-
- if (fsHz <= 0)
- {
- /* fsHz must be strictly positive */
- return -1;
- }
-
- /***********************************************/
- /* Get betaInv parameter based on playout mode */
- /***********************************************/
-
- if (streamingMode)
- {
- /* streaming (listen-only) mode */
- betaInv = AUTOMODE_STREAMING_BETA_INV_Q30;
- }
- else
- {
- /* normal mode */
- betaInv = AUTOMODE_BETA_INV_Q30;
- }
-
- /*******************************************************************/
- /* Calculate optimal buffer level without considering jitter peaks */
- /*******************************************************************/
-
- /*
- * Find the B for which the probability of observing an inter-arrival time larger
- * than or equal to B is less than or equal to betaInv.
- */
- B = 0; /* start from the beginning of iatProb */
- sum1 -= inst->iatProb[B]; /* ensure that optimal level is not less than 1 */
-
- do
- {
- /*
- * Subtract the probabilities one by one until the sum is no longer greater
- * than betaInv.
- */
- sum1 -= inst->iatProb[++B];
- }
- while ((sum1 > betaInv) && (B < MAX_IAT));
-
- Bopt = B; /* This is our primary value for the optimal buffer level Bopt */
-
- if (mdCodec)
- {
- /*
- * Use alternative cost function when multiple description codec is in use.
- * Do not have to re-calculate all points, just back off a few steps from
- * previous value of B.
- */
- int32_t sum2 = sum1; /* copy sum1 */
-
- while ((sum2 <= betaInv + inst->iatProb[Bopt]) && (Bopt > 0))
- {
- /* Go backwards in the sum until the modified cost function solution is found */
- sum2 += inst->iatProb[Bopt--];
- }
-
- Bopt++; /* This is the optimal level when using an MD codec */
-
- /* Now, Bopt and B can have different values. */
- }
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF;
- if (fwrite( &temp_var, sizeof(int), 1, delay_fid2 ) != 1) {
- return -1;
- }
- temp_var = (int) (Bopt * inst->packetSpeechLenSamp);
-#endif
-
- /******************************************************************/
- /* Make levelFiltFact adaptive: Larger B <=> larger levelFiltFact */
- /******************************************************************/
-
- switch (B)
- {
- case 0:
- case 1:
- {
- inst->levelFiltFact = 251;
- break;
- }
- case 2:
- case 3:
- {
- inst->levelFiltFact = 252;
- break;
- }
- case 4:
- case 5:
- case 6:
- case 7:
- {
- inst->levelFiltFact = 253;
- break;
- }
- default: /* B > 7 */
- {
- inst->levelFiltFact = 254;
- break;
- }
- }
-
- /************************/
- /* Peak mode operations */
- /************************/
-
- /* Compare current IAT with peak threshold
- *
- * If IAT > optimal level + threshold (+1 for MD codecs)
- * or if IAT > 2 * optimal level (note: optimal level is in Q8):
- */
- if (timeIatPkts > (uint32_t) (Bopt + inst->peakThresholdPkt + (mdCodec != 0))
- || timeIatPkts > (uint32_t) WEBRTC_SPL_LSHIFT_U16(Bopt, 1))
- {
- /* A peak is observed */
-
- if (inst->peakIndex == -1)
- {
- /* this is the first peak; prepare for next peak */
- inst->peakIndex = 0;
- /* set the mode-disable counter */
- inst->peakModeDisabled = WEBRTC_SPL_LSHIFT_W16(1, NUM_PEAKS_REQUIRED-2);
- }
- else if (inst->peakIatCountSamp
- <=
- (uint32_t) WEBRTC_SPL_MUL_32_16(fsHz, MAX_PEAK_PERIOD))
- {
- /* This is not the first peak and the period time is valid */
-
- /* store time elapsed since last peak */
- inst->peakPeriodSamp[inst->peakIndex] = inst->peakIatCountSamp;
-
- /* saturate height to 16 bits */
- inst->peakHeightPkt[inst->peakIndex]
- =
- (int16_t) WEBRTC_SPL_MIN(timeIatPkts, WEBRTC_SPL_WORD16_MAX);
-
- /* increment peakIndex and wrap/modulo */
- inst->peakIndex = (inst->peakIndex + 1) & PEAK_INDEX_MASK;
-
- /* process peak vectors */
- inst->curPeakHeight = 0;
- inst->curPeakPeriod = 0;
-
- for (i = 0; i < NUM_PEAKS; i++)
- {
- /* Find maximum of peak heights and peak periods */
- inst->curPeakHeight
- = WEBRTC_SPL_MAX(inst->curPeakHeight, inst->peakHeightPkt[i]);
- inst->curPeakPeriod
- = WEBRTC_SPL_MAX(inst->curPeakPeriod, inst->peakPeriodSamp[i]);
-
- }
-
- inst->peakModeDisabled >>= 1; /* decrease mode-disable "counter" */
-
- }
- else if (inst->peakIatCountSamp > (uint32_t) WEBRTC_SPL_MUL_32_16(fsHz,
- WEBRTC_SPL_LSHIFT_W16(MAX_PEAK_PERIOD, 1)))
- {
- /*
- * More than 2 * MAX_PEAK_PERIOD has elapsed since last peak;
- * too long time => reset peak statistics
- */
- inst->curPeakHeight = 0;
- inst->curPeakPeriod = 0;
- for (i = 0; i < NUM_PEAKS; i++)
- {
- inst->peakHeightPkt[i] = 0;
- inst->peakPeriodSamp[i] = 0;
- }
-
- inst->peakIndex = -1; /* Next peak is first peak */
- inst->peakIatCountSamp = 0;
- }
-
- inst->peakIatCountSamp = 0; /* Reset peak interval timer */
- } /* end if peak is observed */
-
- /* Evaluate peak mode conditions */
-
- /*
- * If not disabled (enough peaks have been observed) and
- * time since last peak is less than two peak periods.
- */
- inst->peakFound = 0;
- if ((!inst->peakModeDisabled) && (inst->peakIatCountSamp
- <= WEBRTC_SPL_LSHIFT_W32(inst->curPeakPeriod , 1)))
- {
- /* Engage peak mode */
- inst->peakFound = 1;
- /* Set optimal buffer level to curPeakHeight (if it's not already larger) */
- Bopt = WEBRTC_SPL_MAX(Bopt, inst->curPeakHeight);
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- temp_var = (int) -(Bopt * inst->packetSpeechLenSamp);
-#endif
- }
-
- /* Scale Bopt to Q8 */
- Bopt = WEBRTC_SPL_LSHIFT_U16(Bopt,8);
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- if (fwrite( &temp_var, sizeof(int), 1, delay_fid2 ) != 1) {
- return -1;
- }
-#endif
-
- /* Sanity check: Bopt must be strictly positive */
- if (Bopt <= 0)
- {
- Bopt = WEBRTC_SPL_LSHIFT_W16(1, 8); /* 1 in Q8 */
- }
-
- return Bopt; /* return value in Q8 */
-}
-
-
-int WebRtcNetEQ_BufferLevelFilter(int32_t curSizeMs8, AutomodeInst_t *inst,
- int sampPerCall, int16_t fsMult)
-{
-
- int16_t curSizeFrames;
-
- /****************/
- /* Sanity check */
- /****************/
-
- if (sampPerCall <= 0 || fsMult <= 0)
- {
- /* sampPerCall and fsMult must both be strictly positive */
- return -1;
- }
-
- /* Check if packet size has been detected */
- if (inst->packetSpeechLenSamp > 0)
- {
- /*
- * Current buffer level in packet lengths
- * = (curSizeMs8 * fsMult) / packetSpeechLenSamp
- */
- curSizeFrames = (int16_t) WebRtcSpl_DivW32W16(
- WEBRTC_SPL_MUL_32_16(curSizeMs8, fsMult), inst->packetSpeechLenSamp);
- }
- else
- {
- curSizeFrames = 0;
- }
-
- /* Filter buffer level */
- if (inst->levelFiltFact > 0) /* check that filter factor is set */
- {
- /* Filter:
- * buffLevelFilt = levelFiltFact * buffLevelFilt
- * + (1-levelFiltFact) * curSizeFrames
- *
- * levelFiltFact is in Q8
- */
- inst->buffLevelFilt = ((inst->levelFiltFact * inst->buffLevelFilt) >> 8) +
- (256 - inst->levelFiltFact) * curSizeFrames;
- }
-
- /* Account for time-scale operations (accelerate and pre-emptive expand) */
- if (inst->prevTimeScale)
- {
- /*
- * Time-scaling has been performed since last filter update.
- * Subtract the sampleMemory from buffLevelFilt after converting sampleMemory
- * from samples to packets in Q8. Make sure that the filtered value is
- * non-negative.
- */
- inst->buffLevelFilt = WEBRTC_SPL_MAX( inst->buffLevelFilt -
- WebRtcSpl_DivW32W16(
- WEBRTC_SPL_LSHIFT_W32(inst->sampleMemory, 8), /* sampleMemory in Q8 */
- inst->packetSpeechLenSamp ), /* divide by packetSpeechLenSamp */
- 0);
-
- /*
- * Reset flag and set timescaleHoldOff timer to prevent further time-scaling
- * for some time.
- */
- inst->prevTimeScale = 0;
- inst->timescaleHoldOff = AUTOMODE_TIMESCALE_LIMIT;
- }
-
- /* Update time counters and HoldOff timer */
- inst->packetIatCountSamp += sampPerCall; /* packet inter-arrival time */
- inst->peakIatCountSamp += sampPerCall; /* peak inter-arrival time */
- inst->timescaleHoldOff >>= 1; /* time-scaling limiter */
- inst->maxCSumUpdateTimer += sampPerCall; /* cumulative-sum timer */
-
- return 0;
-
-}
-
-
-int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, int16_t newLenSamp,
- int32_t fsHz)
-{
-
- /* Sanity check for newLenSamp and fsHz */
- if (newLenSamp <= 0 || fsHz <= 0)
- {
- return -1;
- }
-
- inst->packetSpeechLenSamp = newLenSamp; /* Store packet size in instance */
-
- /* Make NetEQ wait for first regular packet before starting the timer */
- inst->lastPackCNGorDTMF = 1;
-
- inst->packetIatCountSamp = 0; /* Reset packet time counter */
-
- /*
- * Calculate peak threshold from packet size. The threshold is defined as
- * the (fractional) number of packets that corresponds to PEAK_HEIGHT
- * (in Q8 seconds). That is, threshold = PEAK_HEIGHT/256 * fsHz / packLen.
- */
- inst->peakThresholdPkt = (uint16_t) WebRtcSpl_DivW32W16ResW16(
- WEBRTC_SPL_MUL_16_16_RSFT(PEAK_HEIGHT,
- (int16_t) WEBRTC_SPL_RSHIFT_W32(fsHz, 6), 2), inst->packetSpeechLenSamp);
-
- return 0;
-}
-
-
-int WebRtcNetEQ_ResetAutomode(AutomodeInst_t *inst, int maxBufLenPackets)
-{
-
- int i;
- uint16_t tempprob = 0x4002; /* 16384 + 2 = 100000000000010 binary; */
-
- /* Sanity check for maxBufLenPackets */
- if (maxBufLenPackets <= 1)
- {
- /* Invalid value; set to 10 instead (arbitary small number) */
- maxBufLenPackets = 10;
- }
-
- /* Reset filtered buffer level */
- inst->buffLevelFilt = 0;
-
- /* Reset packet size to unknown */
- inst->packetSpeechLenSamp = 0;
-
- /*
- * Flag that last packet was special payload, so that automode will treat the next speech
- * payload as the first payload received.
- */
- inst->lastPackCNGorDTMF = 1;
-
- /* Reset peak detection parameters */
- inst->peakModeDisabled = 1; /* disable peak mode */
- inst->peakIatCountSamp = 0;
- inst->peakIndex = -1; /* indicates that no peak is registered */
- inst->curPeakHeight = 0;
- inst->curPeakPeriod = 0;
- for (i = 0; i < NUM_PEAKS; i++)
- {
- inst->peakHeightPkt[i] = 0;
- inst->peakPeriodSamp[i] = 0;
- }
-
- /*
- * Set the iatProb PDF vector to an exponentially decaying distribution
- * iatProb[i] = 0.5^(i+1), i = 0, 1, 2, ...
- * iatProb is in Q30.
- */
- for (i = 0; i <= MAX_IAT; i++)
- {
- /* iatProb[i] = 0.5^(i+1) = iatProb[i-1] / 2 */
- tempprob = WEBRTC_SPL_RSHIFT_U16(tempprob, 1);
- /* store in PDF vector */
- inst->iatProb[i] = WEBRTC_SPL_LSHIFT_W32((int32_t) tempprob, 16);
- }
-
- /*
- * Calculate the optimal buffer level corresponding to the initial PDF.
- * No need to call WebRtcNetEQ_CalcOptimalBufLvl() since we have just hard-coded
- * all the variables that the buffer level depends on => we know the result
- */
- inst->optBufLevel = WEBRTC_SPL_MIN(4,
- (maxBufLenPackets >> 1) + (maxBufLenPackets >> 1)); /* 75% of maxBufLenPackets */
- inst->required_delay_q8 = inst->optBufLevel;
- inst->levelFiltFact = 253;
-
- /*
- * Reset the iat update forgetting factor to 0 to make the impact of the first
- * incoming packets greater.
- */
- inst->iatProbFact = 0;
-
- /* Reset packet inter-arrival time counter */
- inst->packetIatCountSamp = 0;
-
- /* Clear time-scaling related variables */
- inst->prevTimeScale = 0;
- inst->timescaleHoldOff = AUTOMODE_TIMESCALE_LIMIT; /* don't allow time-scaling immediately */
-
- inst->cSumIatQ8 = 0;
- inst->maxCSumIatQ8 = 0;
-
- return 0;
-}
-
-int32_t WebRtcNetEQ_AverageIAT(const AutomodeInst_t *inst) {
- int i;
- int32_t sum_q24 = 0;
- assert(inst);
- for (i = 0; i <= MAX_IAT; ++i) {
- /* Shift 6 to fit worst case: 2^30 * 64. */
- sum_q24 += (inst->iatProb[i] >> 6) * i;
- }
- /* Subtract the nominal inter-arrival time 1 = 2^24 in Q24. */
- sum_q24 -= (1 << 24);
- /*
- * Multiply with 1000000 / 2^24 = 15625 / 2^18 to get in parts-per-million.
- * Shift 7 to Q17 first, then multiply with 15625 and shift another 11.
- */
- return ((sum_q24 >> 7) * 15625) >> 11;
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.h
deleted file mode 100644
index c5dd829b834..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/automode.h
+++ /dev/null
@@ -1,274 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the functionality for automatic buffer level optimization.
- */
-
-#ifndef AUTOMODE_H
-#define AUTOMODE_H
-
-#include "typedefs.h"
-
-/*************/
-/* Constants */
-/*************/
-
-/* The beta parameter defines the trade-off between delay and underrun probability. */
-/* It is defined through its inverse in Q30 */
-#define AUTOMODE_BETA_INV_Q30 53687091 /* 1/20 in Q30 */
-#define AUTOMODE_STREAMING_BETA_INV_Q30 536871 /* 1/2000 in Q30 */
-
-/* Forgetting factor for the inter-arrival time statistics */
-#define IAT_PROB_FACT 32745 /* 0.9993 in Q15 */
-
-/* Maximum inter-arrival time to register (in "packet-times") */
-#define MAX_IAT 64
-#define PEAK_HEIGHT 20 /* 0.08s in Q8 */
-
-/* The value (1<<5) sets maximum accelerate "speed" to about 100 ms/s */
-#define AUTOMODE_TIMESCALE_LIMIT (1<<5)
-
-/* Peak mode related parameters */
-/* Number of peaks in peak vector; must be a power of 2 */
-#define NUM_PEAKS 8
-
-/* Must be NUM_PEAKS-1 */
-#define PEAK_INDEX_MASK 0x0007
-
-/* Longest accepted peak distance */
-#define MAX_PEAK_PERIOD 10
-#define MAX_STREAMING_PEAK_PERIOD 600 /* 10 minutes */
-
-/* Number of peaks required before peak mode can be engaged */
-#define NUM_PEAKS_REQUIRED 3
-
-/* Drift term for cumulative sum */
-#define CSUM_IAT_DRIFT 2
-
-/*******************/
-/* Automode struct */
-/*******************/
-
-/* The automode struct is a sub-struct of the
- bufstats-struct (BufstatsInst_t). */
-
-typedef struct
-{
-
- /* Filtered current buffer level */
- uint16_t levelFiltFact; /* filter forgetting factor in Q8 */
- int buffLevelFilt; /* filtered buffer level in Q8 */
-
- /* Inter-arrival time (iat) statistics */
- int32_t iatProb[MAX_IAT + 1]; /* iat probabilities in Q30 */
- int16_t iatProbFact; /* iat forgetting factor in Q15 */
- uint32_t packetIatCountSamp; /* time (in timestamps) elapsed since last
- packet arrival, based on RecOut calls */
- int optBufLevel; /* current optimal buffer level in Q8 */
-
- /* Packet related information */
- int16_t packetSpeechLenSamp; /* speech samples per incoming packet */
- int16_t lastPackCNGorDTMF; /* indicates that the last received packet
- contained special information */
- uint16_t lastSeqNo; /* sequence number for last packet received */
- uint32_t lastTimeStamp; /* timestamp for the last packet received */
- int firstPacketReceived; /* set to zero implicitly when the instance is
- filled with zeros */
- int32_t sampleMemory; /* memory position for keeping track of how many
- samples we cut during expand */
- int16_t prevTimeScale; /* indicates that the last mode was an accelerate
- or pre-emptive expand operation */
- uint32_t timescaleHoldOff; /* counter that is shifted one step right each
- RecOut call; time-scaling allowed when it has
- reached 0 */
- int16_t extraDelayMs; /* extra delay for sync with video */
-
- int minimum_delay_ms; /* Desired delay, NetEq maintains this amount of
- delay unless jitter statistics suggests a higher value. */
- int maximum_delay_ms; /* Max desired delay, NetEq will not go above this
- amount of delay even if jitter statistics suggests a higher value. */
-
- int required_delay_q8; /* Smallest delay required. This is computed
- according to inter-arrival time and playout mode. It has the same unit
- as |optBufLevel|. */
-
- /* Peak-detection */
- /* vector with the latest peak periods (peak spacing in samples) */
- uint32_t peakPeriodSamp[NUM_PEAKS];
- /* vector with the latest peak heights (in packets) */
- int16_t peakHeightPkt[NUM_PEAKS];
- int16_t peakIndex; /* index for the vectors peakPeriodSamp and peakHeightPkt;
- -1 if still waiting for first peak */
- uint16_t peakThresholdPkt; /* definition of peak (in packets);
- calculated from PEAK_HEIGHT */
- uint32_t peakIatCountSamp; /* samples elapsed since last peak was observed */
- uint32_t curPeakPeriod; /* current maximum of peakPeriodSamp vector */
- int16_t curPeakHeight; /* derived from peakHeightPkt vector;
- used as optimal buffer level in peak mode */
- int16_t peakModeDisabled; /* ==0 if peak mode can be engaged; >0 if not */
- uint16_t peakFound; /* 1 if peaks are detected and extra delay is applied;
- * 0 otherwise. */
-
- /* Post-call statistics */
- uint32_t countIAT500ms; /* number of times we got small network outage */
- uint32_t countIAT1000ms; /* number of times we got medium network outage */
- uint32_t countIAT2000ms; /* number of times we got large network outage */
- uint32_t longestIATms; /* mSec duration of longest network outage */
-
- int16_t cSumIatQ8; /* cumulative sum of inter-arrival times */
- int16_t maxCSumIatQ8; /* max cumulative sum IAT */
- uint32_t maxCSumUpdateTimer;/* time elapsed since maximum was observed */
-} AutomodeInst_t;
-
-/*************/
-/* Functions */
-/*************/
-
-/****************************************************************************
- * WebRtcNetEQ_UpdateIatStatistics(...)
- *
- * Update the packet inter-arrival time statistics when a new packet arrives.
- * This function should be called for every arriving packet, with some
- * exceptions when using DTX/VAD and DTMF. A new optimal buffer level is
- * calculated after the update.
- *
- * Input:
- * - inst : Automode instance
- * - maxBufLen : Maximum number of packets the buffer can hold
- * - seqNumber : RTP sequence number of incoming packet
- * - timeStamp : RTP timestamp of incoming packet
- * - fsHz : Sample rate in Hz
- * - mdCodec : Non-zero if the current codec is a multiple-
- * description codec
- * - streamingMode : A non-zero value will increase jitter robustness (and delay)
- *
- * Output:
- * - inst : Updated automode instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
- uint16_t seqNumber, uint32_t timeStamp,
- int32_t fsHz, int mdCodec, int streamingMode);
-
-/****************************************************************************
- * WebRtcNetEQ_CalcOptimalBufLvl(...)
- *
- * Calculate the optimal buffer level based on packet inter-arrival time
- * statistics.
- *
- * Input:
- * - inst : Automode instance
- * - fsHz : Sample rate in Hz
- * - mdCodec : Non-zero if the current codec is a multiple-
- * description codec
- * - timeIatPkts : Currently observed inter-arrival time in packets
- * - streamingMode : A non-zero value will increase jitter robustness (and delay)
- *
- * Output:
- * - inst : Updated automode instance
- *
- * Return value : >0 - Optimal buffer level
- * <0 - Error
- */
-
-int16_t WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, int32_t fsHz,
- int mdCodec, uint32_t timeIatPkts,
- int streamingMode);
-
-/****************************************************************************
- * WebRtcNetEQ_BufferLevelFilter(...)
- *
- * Update filtered buffer level. The function must be called once for each
- * RecOut call, since the timing of automode hinges on counters that are
- * updated by this function.
- *
- * Input:
- * - curSizeMs8 : Total length of unused speech data in packet buffer
- * and sync buffer, in ms * 8
- * - inst : Automode instance
- * - sampPerCall : Number of samples per RecOut call
- * - fsMult : Sample rate in Hz divided by 8000
- *
- * Output:
- * - inst : Updated automode instance
- *
- * Return value : 0 - Ok
- * : <0 - Error
- */
-
-int WebRtcNetEQ_BufferLevelFilter(int32_t curSizeMs8, AutomodeInst_t *inst,
- int sampPerCall, int16_t fsMult);
-
-/****************************************************************************
- * WebRtcNetEQ_SetPacketSpeechLen(...)
- *
- * Provide the number of speech samples extracted from a packet to the
- * automode instance. Several of the calculations within automode depend
- * on knowing the packet size.
- *
- *
- * Input:
- * - inst : Automode instance
- * - newLenSamp : Number of samples per RecOut call
- * - fsHz : Sample rate in Hz
- *
- * Output:
- * - inst : Updated automode instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, int16_t newLenSamp,
- int32_t fsHz);
-
-/****************************************************************************
- * WebRtcNetEQ_ResetAutomode(...)
- *
- * Reset the automode instance.
- *
- *
- * Input:
- * - inst : Automode instance
- * - maxBufLenPackets : Maximum number of packets that the packet
- * buffer can hold (>1)
- *
- * Output:
- * - inst : Updated automode instance
- *
- * Return value : 0 - Ok
- */
-
-int WebRtcNetEQ_ResetAutomode(AutomodeInst_t *inst, int maxBufLenPackets);
-
-/****************************************************************************
- * WebRtcNetEQ_AverageIAT(...)
- *
- * Calculate the average inter-arrival time based on current statistics.
- * The average is expressed in parts per million relative the nominal. That is,
- * if the average inter-arrival time is equal to the nominal frame time,
- * the return value is zero. A positive value corresponds to packet spacing
- * being too large, while a negative value means that the packets arrive with
- * less spacing than expected.
- *
- *
- * Input:
- * - inst : Automode instance.
- *
- * Return value : Average relative inter-arrival time in samples.
- */
-
-int32_t WebRtcNetEQ_AverageIAT(const AutomodeInst_t *inst);
-
-#endif /* AUTOMODE_H */
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc
index 2dfb3c1f392..e00c4f65e5c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
#include <assert.h>
#include <string.h> // memcpy
@@ -16,8 +16,8 @@
#include <algorithm> // min, max
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/post_decode_vad.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.h
index ac5446bf7f6..8fb310ea8b6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise.h
@@ -8,14 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BACKGROUND_NOISE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BACKGROUND_NOISE_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_BACKGROUND_NOISE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_BACKGROUND_NOISE_H_
#include <string.h> // size_t
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
@@ -126,7 +126,7 @@ class BackgroundNoise {
int32_t residual_energy);
size_t num_channels_;
- scoped_array<ChannelParameters> channel_parameters_;
+ scoped_ptr<ChannelParameters[]> channel_parameters_;
bool initialized_;
NetEqBackgroundNoiseMode mode_;
@@ -134,4 +134,4 @@ class BackgroundNoise {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BACKGROUND_NOISE_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_BACKGROUND_NOISE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise_unittest.cc
index eb7b9fa1edd..0aee62c97be 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/background_noise_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/background_noise_unittest.cc
@@ -10,7 +10,7 @@
// Unit tests for BackgroundNoise class.
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
#include "gtest/gtest.h"
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/bgn_update.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/bgn_update.c
deleted file mode 100644
index 4d660ff55b6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/bgn_update.c
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the function for updating the background noise estimate.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-
-/* Scratch usage:
- Designed for BGN_LPC_ORDER <= 10
-
- Type Name size startpos endpos
- int32_t pw32_autoCorr 22 0 21 (Length (BGN_LPC_ORDER + 1)*2)
- int16_t pw16_tempVec 10 22 31 (Length BGN_LPC_ORDER)
- int16_t pw16_rc 10 32 41 (Length BGN_LPC_ORDER)
- int16_t pw16_outVec 74 0 73 (Length BGN_LPC_ORDER + 64)
-
- Total: 74
- */
-
-#if (BGN_LPC_ORDER > 10) && (defined SCRATCH)
-#error BGN_LPC_ORDER is too large for current scratch memory allocation
-#endif
-
-#define SCRATCH_PW32_AUTO_CORR 0
-#define SCRATCH_PW16_TEMP_VEC 22
-#define SCRATCH_PW16_RC 32
-#define SCRATCH_PW16_OUT_VEC 0
-
-#define NETEQFIX_BGNFRAQINCQ16 229 /* 0.0035 in Q16 */
-
-/****************************************************************************
- * WebRtcNetEQ_BGNUpdate(...)
- *
- * This function updates the background noise parameter estimates.
- *
- * Input:
- * - inst : NetEQ instance, where the speech history is stored.
- * - scratchPtr : Pointer to scratch vector.
- *
- * Output:
- * - inst : Updated information about the BGN characteristics.
- *
- * Return value : No return value
- */
-
-void WebRtcNetEQ_BGNUpdate(
-#ifdef SCRATCH
- DSPInst_t *inst, int16_t *pw16_scratchPtr
-#else
- DSPInst_t *inst
-#endif
-)
-{
- const int16_t w16_vecLen = 256;
- BGNInst_t *BGN_Inst = &(inst->BGNInst);
-#ifdef SCRATCH
- int32_t *pw32_autoCorr = (int32_t*) (pw16_scratchPtr + SCRATCH_PW32_AUTO_CORR);
- int16_t *pw16_tempVec = pw16_scratchPtr + SCRATCH_PW16_TEMP_VEC;
- int16_t *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
- int16_t *pw16_outVec = pw16_scratchPtr + SCRATCH_PW16_OUT_VEC;
-#else
- int32_t pw32_autoCorr[BGN_LPC_ORDER + 1];
- int16_t pw16_tempVec[BGN_LPC_ORDER];
- int16_t pw16_outVec[BGN_LPC_ORDER + 64];
- int16_t pw16_rc[BGN_LPC_ORDER];
-#endif
- int16_t pw16_A[BGN_LPC_ORDER + 1];
- int32_t w32_tmp;
- int16_t *pw16_vec;
- int16_t w16_maxSample;
- int16_t w16_tmp, w16_tmp2;
- int16_t w16_enSampleShift;
- int32_t w32_en, w32_enBGN;
- int32_t w32_enUpdateThreashold;
- int16_t stability;
-
- pw16_vec = inst->pw16_speechHistory + inst->w16_speechHistoryLen - w16_vecLen;
-
-#ifdef NETEQ_VAD
- if( !inst->VADInst.VADEnabled /* we are not using post-decode VAD */
- || inst->VADInst.VADDecision == 0 )
- { /* ... or, post-decode VAD says passive speaker */
-#endif /* NETEQ_VAD */
-
- /*Insert zeros to guarantee that boundary values do not distort autocorrelation */
- WEBRTC_SPL_MEMCPY_W16(pw16_tempVec, pw16_vec - BGN_LPC_ORDER, BGN_LPC_ORDER);
- WebRtcSpl_MemSetW16(pw16_vec - BGN_LPC_ORDER, 0, BGN_LPC_ORDER);
-
- w16_maxSample = WebRtcSpl_MaxAbsValueW16(pw16_vec, w16_vecLen);
- w16_tmp = 8 /* log2(w16_veclen) = 8 */
- - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_maxSample, w16_maxSample));
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- WebRtcNetEQ_CrossCorr(pw32_autoCorr, pw16_vec, pw16_vec, w16_vecLen, BGN_LPC_ORDER + 1,
- w16_tmp, -1);
-
- /* Copy back data */
- WEBRTC_SPL_MEMCPY_W16(pw16_vec - BGN_LPC_ORDER, pw16_tempVec, BGN_LPC_ORDER);
-
- w16_enSampleShift = 8 - w16_tmp; /* Number of shifts to get energy/sample */
- /* pw32_autoCorr[0]>>w16_enSampleShift */
- w32_en = WEBRTC_SPL_RSHIFT_W32(pw32_autoCorr[0], w16_enSampleShift);
- if ((w32_en < BGN_Inst->w32_energyUpdate
-#ifdef NETEQ_VAD
- /* post-decode VAD disabled and w32_en sufficiently low */
- && !inst->VADInst.VADEnabled)
- /* ... or, post-decode VAD says passive speaker */
- || (inst->VADInst.VADEnabled && inst->VADInst.VADDecision == 0)
-#else
- ) /* just close the extra parenthesis */
-#endif /* NETEQ_VAD */
- )
- {
- /* Generate LPC coefficients */
- if (pw32_autoCorr[0] > 0)
- {
- /* regardless of whether the filter is actually updated or not,
- update energy threshold levels, since we have in fact observed
- a low energy signal */
- if (w32_en < BGN_Inst->w32_energyUpdate)
- {
- /* Never get under 1.0 in average sample energy */
- BGN_Inst->w32_energyUpdate = WEBRTC_SPL_MAX(w32_en, 1);
- BGN_Inst->w32_energyUpdateLow = 0;
- }
-
- stability = WebRtcSpl_LevinsonDurbin(pw32_autoCorr, pw16_A, pw16_rc, BGN_LPC_ORDER);
- /* Only update BGN if filter is stable */
- if (stability != 1)
- {
- return;
- }
- }
- else
- {
- /* Do not update */
- return;
- }
- /* Generate the CNG gain factor by looking at the energy of the residual */
- WebRtcSpl_FilterMAFastQ12(pw16_vec + w16_vecLen - 64, pw16_outVec, pw16_A,
- BGN_LPC_ORDER + 1, 64);
- w32_enBGN = WebRtcNetEQ_DotW16W16(pw16_outVec, pw16_outVec, 64, 0);
- /* Dot product should never overflow since it is BGN and residual! */
-
- /*
- * Check spectral flatness
- * Comparing the residual variance with the input signal variance tells
- * if the spectrum is flat or not.
- * (20*w32_enBGN) >= (w32_en<<6)
- * Also ensure that the energy is non-zero.
- */
- if ((WEBRTC_SPL_MUL_32_16(w32_enBGN, 20) >= WEBRTC_SPL_LSHIFT_W32(w32_en, 6))
- && (w32_en > 0))
- {
- /* spectrum is flat enough; save filter parameters */
-
- WEBRTC_SPL_MEMCPY_W16(BGN_Inst->pw16_filter, pw16_A, BGN_LPC_ORDER+1);
- WEBRTC_SPL_MEMCPY_W16(BGN_Inst->pw16_filterState,
- pw16_vec + w16_vecLen - BGN_LPC_ORDER, BGN_LPC_ORDER);
-
- /* Save energy level */
- BGN_Inst->w32_energy = WEBRTC_SPL_MAX(w32_en, 1);
-
- /* Update energy threshold levels */
- /* Never get under 1.0 in average sample energy */
- BGN_Inst->w32_energyUpdate = WEBRTC_SPL_MAX(w32_en, 1);
- BGN_Inst->w32_energyUpdateLow = 0;
-
- /* Normalize w32_enBGN to 29 or 30 bits before sqrt */
- w16_tmp2 = WebRtcSpl_NormW32(w32_enBGN) - 1;
- if (w16_tmp2 & 0x1)
- {
- w16_tmp2 -= 1; /* Even number of shifts required */
- }
- w32_enBGN = WEBRTC_SPL_SHIFT_W32(w32_enBGN, w16_tmp2);
-
- /* Calculate scale and shift factor */
- BGN_Inst->w16_scale = (int16_t) WebRtcSpl_SqrtFloor(w32_enBGN);
- BGN_Inst->w16_scaleShift = 13 + ((6 + w16_tmp2) >> 1); /* RANDN table is in Q13, */
- /* 6=log2(64) */
-
- BGN_Inst->w16_initialized = 1;
- }
-
- }
- else
- {
- /*
- * Will only happen if post-decode VAD is disabled and w32_en is not low enough.
- * Increase the threshold for update so that it increases by a factor 4 in four
- * seconds.
- * energy = energy * 1.0035
- */
- w32_tmp = WEBRTC_SPL_MUL_16_16_RSFT(NETEQFIX_BGNFRAQINCQ16,
- BGN_Inst->w32_energyUpdateLow, 16);
- w32_tmp += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
- (int16_t)(BGN_Inst->w32_energyUpdate & 0xFF));
- w32_tmp += (WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
- (int16_t)((BGN_Inst->w32_energyUpdate>>8) & 0xFF)) << 8);
- BGN_Inst->w32_energyUpdateLow += w32_tmp;
-
- BGN_Inst->w32_energyUpdate += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
- (int16_t)(BGN_Inst->w32_energyUpdate>>16));
- BGN_Inst->w32_energyUpdate += BGN_Inst->w32_energyUpdateLow >> 16;
- BGN_Inst->w32_energyUpdateLow = (BGN_Inst->w32_energyUpdateLow & 0x0FFFF);
-
- /* Update maximum energy */
- /* Decrease by a factor 1/1024 each time */
- BGN_Inst->w32_energyMax = BGN_Inst->w32_energyMax - (BGN_Inst->w32_energyMax >> 10);
- if (w32_en > BGN_Inst->w32_energyMax)
- {
- BGN_Inst->w32_energyMax = w32_en;
- }
-
- /* Set update level to at the minimum 60.21dB lower then the maximum energy */
- w32_enUpdateThreashold = (BGN_Inst->w32_energyMax + 524288) >> 20;
- if (w32_enUpdateThreashold > BGN_Inst->w32_energyUpdate)
- {
- BGN_Inst->w32_energyUpdate = w32_enUpdateThreashold;
- }
- }
-
-#ifdef NETEQ_VAD
-} /* closing initial if-statement */
-#endif /* NETEQ_VAD */
-
- return;
-}
-
-#undef SCRATCH_PW32_AUTO_CORR
-#undef SCRATCH_PW16_TEMP_VEC
-#undef SCRATCH_PW16_RC
-#undef SCRATCH_PW16_OUT_VEC
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc
index 70b49310660..0388b195024 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
#include <algorithm> // Provide access to std::max.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.h
index 282ab7a2221..48f7f564c98 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BUFFER_LEVEL_FILTER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BUFFER_LEVEL_FILTER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_BUFFER_LEVEL_FILTER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_BUFFER_LEVEL_FILTER_H_
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
namespace webrtc {
@@ -44,4 +44,4 @@ class BufferLevelFilter {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_BUFFER_LEVEL_FILTER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_BUFFER_LEVEL_FILTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter_unittest.cc
index ddaf08d1d38..9589099d4a0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/buffer_level_filter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_level_filter_unittest.cc
@@ -10,7 +10,7 @@
// Unit tests for BufferLevelFilter class.
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
#include <math.h> // Access to pow function.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_stats.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_stats.h
deleted file mode 100644
index 722f477ea04..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/buffer_stats.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Calculates and stores the packet buffer statistics.
- */
-
-#ifndef BUFFER_STATS_H
-#define BUFFER_STATS_H
-
-#include "automode.h"
-#include "webrtc_neteq.h" /* to define enum WebRtcNetEQPlayoutMode */
-
-/* NetEQ related decisions */
-#define BUFSTATS_DO_NORMAL 0
-#define BUFSTATS_DO_ACCELERATE 1
-#define BUFSTATS_DO_MERGE 2
-#define BUFSTATS_DO_EXPAND 3
-#define BUFSTAT_REINIT 4
-#define BUFSTATS_DO_RFC3389CNG_PACKET 5
-#define BUFSTATS_DO_RFC3389CNG_NOPACKET 6
-#define BUFSTATS_DO_INTERNAL_CNG_NOPACKET 7
-#define BUFSTATS_DO_PREEMPTIVE_EXPAND 8
-#define BUFSTAT_REINIT_DECODER 9
-#define BUFSTATS_DO_DTMF_ONLY 10
-/* Decisions related to when NetEQ is switched off (or in FAX mode) */
-#define BUFSTATS_DO_ALTERNATIVE_PLC 11
-#define BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS 12
-#define BUFSTATS_DO_AUDIO_REPETITION 13
-#define BUFSTATS_DO_AUDIO_REPETITION_INC_TS 14
-
-/* Reinit decoder states after this number of expands (upon arrival of new packet) */
-#define REINIT_AFTER_EXPANDS 100
-
-/* Wait no longer than this number of RecOut calls before using an "early" packet */
-#define MAX_WAIT_FOR_PACKET 10
-
-/* CNG modes */
-#define CNG_OFF 0
-#define CNG_RFC3389_ON 1
-#define CNG_INTERNAL_ON 2
-
-typedef struct
-{
-
- /* store statistical data here */
- int16_t w16_cngOn; /* remember if CNG is interrupted by other event (e.g. DTMF) */
- int16_t w16_noExpand;
- int32_t uw32_CNGplayedTS;
-
- /* VQmon data */
- uint16_t avgDelayMsQ8;
- int16_t maxDelayMs;
-
- AutomodeInst_t Automode_inst;
-
-} BufstatsInst_t;
-
-/****************************************************************************
- * WebRtcNetEQ_BufstatsDecision()
- *
- * Gives a decision about what action that is currently desired
- *
- *
- * Input:
- * inst: The bufstat instance
- * cur_size: Current buffer size in ms in Q3 domain
- * targetTS: The desired timestamp to start playout from
- * availableTS: The closest future value available in buffer
- * noPacket 1 if no packet is available, makes availableTS undefined
- * prevPlayMode mode of last NetEq playout
- * timestampsPerCall number of timestamp for 10ms
- *
- * Output:
- * Returns: A decision, as defined above (see top of file)
- *
- */
-
-uint16_t WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, int16_t frameSize,
- int32_t cur_size, uint32_t targetTS,
- uint32_t availableTS, int noPacket,
- int cngPacket, int prevPlayMode,
- enum WebRtcNetEQPlayoutMode playoutMode,
- int timestampsPerCall, int NoOfExpandCalls,
- int16_t fs_mult,
- int16_t lastModeBGNonly, int playDtmf);
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/bufstats_decision.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/bufstats_decision.c
deleted file mode 100644
index 352e0507746..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/bufstats_decision.c
+++ /dev/null
@@ -1,427 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the function where the main decision logic for buffer level
- * adaptation happens.
- */
-
-#include "buffer_stats.h"
-
-#include <assert.h>
-
-#include "signal_processing_library.h"
-
-#include "automode.h"
-#include "neteq_defines.h"
-#include "neteq_error_codes.h"
-#include "webrtc_neteq.h"
-
-#define NETEQ_BUFSTAT_20MS_Q7 2560 /* = 20 ms in Q7 */
-
-uint16_t WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, int16_t frameSize,
- int32_t cur_size, uint32_t targetTS,
- uint32_t availableTS, int noPacket,
- int cngPacket, int prevPlayMode,
- enum WebRtcNetEQPlayoutMode playoutMode,
- int timestampsPerCall, int NoOfExpandCalls,
- int16_t fs_mult,
- int16_t lastModeBGNonly, int playDtmf)
-{
-
- int currentDelayMs;
- int32_t currSizeSamples = cur_size;
- int extraDelayPacketsQ8 = 0;
-
- /* Avoid overflow if the buffer size should be really large (cur_size is limited 256ms) */
- int32_t curr_sizeQ7 = WEBRTC_SPL_LSHIFT_W32(cur_size, 4);
- int level_limit_hi, level_limit_lo;
-
- inst->Automode_inst.prevTimeScale &= (prevPlayMode == MODE_SUCCESS_ACCELERATE
- || prevPlayMode == MODE_LOWEN_ACCELERATE || prevPlayMode == MODE_SUCCESS_PREEMPTIVE
- || prevPlayMode == MODE_LOWEN_PREEMPTIVE);
-
- if ((prevPlayMode != MODE_RFC3389CNG) && (prevPlayMode != MODE_CODEC_INTERNAL_CNG))
- {
- /*
- * Do not update buffer history if currently playing CNG
- * since it will bias the filtered buffer level.
- */
- WebRtcNetEQ_BufferLevelFilter(cur_size, &(inst->Automode_inst), timestampsPerCall,
- fs_mult);
- }
- else
- {
- /* only update time counters */
- inst->Automode_inst.packetIatCountSamp += timestampsPerCall; /* packet inter-arrival time */
- inst->Automode_inst.peakIatCountSamp += timestampsPerCall; /* peak inter-arrival time */
- inst->Automode_inst.timescaleHoldOff >>= 1; /* time-scaling limiter */
- }
- cur_size = WEBRTC_SPL_MIN(curr_sizeQ7, WEBRTC_SPL_WORD16_MAX);
-
- /* Calculate VQmon related variables */
- /* avgDelay = avgDelay*(511/512) + currentDelay*(1/512) (sample ms delay in Q8) */
- inst->avgDelayMsQ8 = (int16_t) (WEBRTC_SPL_MUL_16_16_RSFT(inst->avgDelayMsQ8,511,9)
- + (cur_size >> 9));
-
- /* Update maximum delay if needed */
- currentDelayMs = (curr_sizeQ7 >> 7);
- if (currentDelayMs > inst->maxDelayMs)
- {
- inst->maxDelayMs = currentDelayMs;
- }
-
- /* NetEQ is on with normal or steaming mode */
- if (playoutMode == kPlayoutOn || playoutMode == kPlayoutStreaming)
- {
- /* Guard for errors, so that it should not get stuck in error mode */
- if (prevPlayMode == MODE_ERROR)
- {
- if (noPacket)
- {
- return BUFSTATS_DO_EXPAND;
- }
- else
- {
- return BUFSTAT_REINIT;
- }
- }
-
- if (prevPlayMode != MODE_EXPAND && prevPlayMode != MODE_FADE_TO_BGN)
- {
- inst->w16_noExpand = 1;
- }
- else
- {
- inst->w16_noExpand = 0;
- }
-
- if (cngPacket)
- {
- /* signed difference between wanted and available TS */
- int32_t diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
- int32_t optimal_level_samp = (inst->Automode_inst.optBufLevel *
- inst->Automode_inst.packetSpeechLenSamp) >> 8;
- int32_t excess_waiting_time_samp = -diffTS - optimal_level_samp;
-
- if (excess_waiting_time_samp > optimal_level_samp / 2)
- {
- /* The waiting time for this packet will be longer than 1.5
- * times the wanted buffer delay. Advance the clock to cut
- * waiting time down to the optimal.
- */
- inst->uw32_CNGplayedTS += excess_waiting_time_samp;
- diffTS += excess_waiting_time_samp;
- }
-
- if ((diffTS) < 0 && (prevPlayMode == MODE_RFC3389CNG))
- {
- /* Not time to play this packet yet. Wait another round before using this
- * packet. Keep on playing CNG from previous CNG parameters. */
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
-
- /* otherwise, go for the CNG packet now */
- return BUFSTATS_DO_RFC3389CNG_PACKET;
- }
-
- /*Check for expand/cng */
- if (noPacket)
- {
- if (inst->w16_cngOn == CNG_RFC3389_ON)
- {
- /* keep on playing CNG */
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- else if (inst->w16_cngOn == CNG_INTERNAL_ON)
- {
- /* keep on playing internal CNG */
- return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
- }
- else if (playDtmf == 1)
- {
- /* we have not audio data, but can play DTMF */
- return BUFSTATS_DO_DTMF_ONLY;
- }
- else
- {
- /* nothing to play => do Expand */
- return BUFSTATS_DO_EXPAND;
- }
- }
-
- /*
- * If the expand period was very long, reset NetEQ since it is likely that the
- * sender was restarted.
- */
- if (NoOfExpandCalls > REINIT_AFTER_EXPANDS) return BUFSTAT_REINIT_DECODER;
-
- /* Calculate extra delay in Q8 packets */
- if (inst->Automode_inst.extraDelayMs > 0 && inst->Automode_inst.packetSpeechLenSamp
- > 0)
- {
-
- /* (extra delay in samples in Q8) */
- extraDelayPacketsQ8 =
- ((inst->Automode_inst.extraDelayMs * 8 * fs_mult) << 8) /
- inst->Automode_inst.packetSpeechLenSamp;
- }
-
- /* Check if needed packet is available */
- if (targetTS == availableTS)
- {
-
- /* If last mode was not expand, and there is no DTMF to play */
- if (inst->w16_noExpand == 1 && playDtmf == 0)
- {
- /* If so check for accelerate */
-
- level_limit_lo = ((inst->Automode_inst.optBufLevel) >> 1) /* 50 % */
- + ((inst->Automode_inst.optBufLevel) >> 2); /* ... + 25% = 75% */
-
- /* set upper limit to optBufLevel, but make sure that window is at least 20ms */
- level_limit_hi = WEBRTC_SPL_MAX(inst->Automode_inst.optBufLevel,
- level_limit_lo +
- WebRtcSpl_DivW32W16ResW16((WEBRTC_SPL_MUL(20*8, fs_mult) << 8),
- inst->Automode_inst.packetSpeechLenSamp));
-
- /* if extra delay is non-zero, add it */
- if (extraDelayPacketsQ8 > 0)
- {
- level_limit_hi += extraDelayPacketsQ8;
- level_limit_lo += extraDelayPacketsQ8;
- }
-
- if (((inst->Automode_inst.buffLevelFilt >= level_limit_hi) &&
- (inst->Automode_inst.timescaleHoldOff == 0)) ||
- (inst->Automode_inst.buffLevelFilt >= level_limit_hi << 2))
- {
- /*
- * Buffer level higher than limit and time-scaling allowed,
- * OR buffer level _really_ high.
- */
- return BUFSTATS_DO_ACCELERATE;
- }
- else if ((inst->Automode_inst.buffLevelFilt < level_limit_lo)
- && (inst->Automode_inst.timescaleHoldOff == 0))
- {
- return BUFSTATS_DO_PREEMPTIVE_EXPAND;
- }
- }
- return BUFSTATS_DO_NORMAL;
- }
-
- /* Check for Merge */
- else if (availableTS > targetTS)
- {
-
- /* Check that we do not play a packet "too early" */
- if ((prevPlayMode == MODE_EXPAND)
- && (availableTS - targetTS
- < (uint32_t) WEBRTC_SPL_MUL_16_16((int16_t)timestampsPerCall,
- (int16_t)REINIT_AFTER_EXPANDS))
- && (NoOfExpandCalls < MAX_WAIT_FOR_PACKET)
- && (availableTS
- > targetTS
- + WEBRTC_SPL_MUL_16_16((int16_t)timestampsPerCall,
- (int16_t)NoOfExpandCalls))
- && (inst->Automode_inst.buffLevelFilt <= inst->Automode_inst.optBufLevel
- + extraDelayPacketsQ8))
- {
- if (playDtmf == 1)
- {
- /* we still have DTMF to play, so do not perform expand */
- return BUFSTATS_DO_DTMF_ONLY;
- }
- else
- {
- /* nothing to play */
- return BUFSTATS_DO_EXPAND;
- }
- }
-
- /* If previous was CNG period or BGNonly then no merge is needed */
- if ((prevPlayMode == MODE_RFC3389CNG) || (prevPlayMode == MODE_CODEC_INTERNAL_CNG)
- || lastModeBGNonly)
- {
- /*
- * Keep the same delay as before the CNG (or maximum 70 ms in buffer as safety
- * precaution), but make sure that the number of samples in buffer is no
- * higher than 4 times the optimal level.
- */
- int32_t diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
- int val = ((inst->Automode_inst.optBufLevel +
- extraDelayPacketsQ8) *
- inst->Automode_inst.packetSpeechLenSamp) >> 6;
- if (diffTS >= 0 || val < currSizeSamples)
- {
- /* it is time to play this new packet */
- return BUFSTATS_DO_NORMAL;
- }
- else
- {
- /* it is too early to play this new packet => keep on playing CNG */
- if (prevPlayMode == MODE_RFC3389CNG)
- {
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- else if (prevPlayMode == MODE_CODEC_INTERNAL_CNG)
- {
- return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
- }
- else if (playDtmf == 1)
- {
- /* we have not audio data, but can play DTMF */
- return BUFSTATS_DO_DTMF_ONLY;
- }
- else /* lastModeBGNonly */
- {
- /* signal expand, but this will result in BGN again */
- return BUFSTATS_DO_EXPAND;
- }
- }
- }
-
- /* Do not merge unless we have done a Expand before (for complexity reasons) */
- if ((inst->w16_noExpand == 0) || ((frameSize < timestampsPerCall) && (cur_size
- > NETEQ_BUFSTAT_20MS_Q7)))
- {
- return BUFSTATS_DO_MERGE;
- }
- else if (playDtmf == 1)
- {
- /* play DTMF instead of expand */
- return BUFSTATS_DO_DTMF_ONLY;
- }
- else
- {
- return BUFSTATS_DO_EXPAND;
- }
- }
- }
- else
- { /* kPlayoutOff or kPlayoutFax */
- if (cngPacket)
- {
- if (((int32_t) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
- {
- /* time to play this packet now */
- return BUFSTATS_DO_RFC3389CNG_PACKET;
- }
- else
- {
- /* wait before playing this packet */
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- }
- if (noPacket)
- {
- /*
- * No packet =>
- * 1. If in CNG mode play as usual
- * 2. Otherwise use other method to generate data and hold TS value
- */
- if (inst->w16_cngOn == CNG_RFC3389_ON)
- {
- /* keep on playing CNG */
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- else if (inst->w16_cngOn == CNG_INTERNAL_ON)
- {
- /* keep on playing internal CNG */
- return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
- }
- else
- {
- /* nothing to play => invent some data to play out */
- if (playoutMode == kPlayoutOff)
- {
- return BUFSTATS_DO_ALTERNATIVE_PLC;
- }
- else if (playoutMode == kPlayoutFax)
- {
- return BUFSTATS_DO_AUDIO_REPETITION;
- }
- else
- {
- /* UNDEFINED, should not get here... */
- assert(0);
- return BUFSTAT_REINIT;
- }
- }
- }
- else if (targetTS == availableTS)
- {
- return BUFSTATS_DO_NORMAL;
- }
- else
- {
- if (((int32_t) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
- {
- return BUFSTATS_DO_NORMAL;
- }
- else if (playoutMode == kPlayoutOff)
- {
- /*
- * If currently playing CNG, continue with that. Don't increase TS
- * since uw32_CNGplayedTS will be increased.
- */
- if (inst->w16_cngOn == CNG_RFC3389_ON)
- {
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- else if (inst->w16_cngOn == CNG_INTERNAL_ON)
- {
- return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
- }
- else
- {
- /*
- * Otherwise, do PLC and increase TS while waiting for the time to
- * play this packet.
- */
- return BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS;
- }
- }
- else if (playoutMode == kPlayoutFax)
- {
- /*
- * If currently playing CNG, continue with that don't increase TS since
- * uw32_CNGplayedTS will be increased.
- */
- if (inst->w16_cngOn == CNG_RFC3389_ON)
- {
- return BUFSTATS_DO_RFC3389CNG_NOPACKET;
- }
- else if (inst->w16_cngOn == CNG_INTERNAL_ON)
- {
- return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
- }
- else
- {
- /*
- * Otherwise, do audio repetition and increase TS while waiting for the
- * time to play this packet.
- */
- return BUFSTATS_DO_AUDIO_REPETITION_INC_TS;
- }
- }
- else
- {
- /* UNDEFINED, should not get here... */
- assert(0);
- return BUFSTAT_REINIT;
- }
- }
- }
- /* We should not get here (but sometimes we do anyway...) */
- return BUFSTAT_REINIT;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/cng_internal.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/cng_internal.c
deleted file mode 100644
index cb4878fee57..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/cng_internal.c
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the function for obtaining comfort noise from noise parameters
- * according to IETF RFC 3389.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-#include "webrtc_cng.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-
-/****************************************************************************
- * WebRtcNetEQ_Cng(...)
- *
- * This function produces CNG according to RFC 3389.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - len : Number of samples to produce (max 640 or
- * 640 - fsHz*5/8000 for first-time CNG, governed by
- * the definition of WEBRTC_CNG_MAX_OUTSIZE_ORDER in
- * webrtc_cng.h)
- *
- * Output:
- * - pw16_outData : Output CNG
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-#ifdef NETEQ_CNG_CODEC
-/* Must compile NetEQ with CNG support to enable this function */
-
-int WebRtcNetEQ_Cng(DSPInst_t *inst, int16_t *pw16_outData, int len)
-{
- int16_t w16_winMute = 0; /* mixing factor for overlap data */
- int16_t w16_winUnMute = 0; /* mixing factor for comfort noise */
- int16_t w16_winMuteInc = 0; /* mixing factor increment (negative) */
- int16_t w16_winUnMuteInc = 0; /* mixing factor increment */
- int i;
-
- /*
- * Check if last RecOut call was other than RFC3389,
- * that is, this call is the first of a CNG period.
- */
- if (inst->w16_mode != MODE_RFC3389CNG)
- {
- /* Reset generation and overlap slightly with old data */
-
- /* Generate len samples + overlap */
- if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData,
- (int16_t) (len + inst->ExpandInst.w16_overlap), 1) < 0)
- {
- /* error returned */
- return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
- }
-
- /* Set windowing parameters depending on sample rate */
- if (inst->fs == 8000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_8KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_8KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_8KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC;
-#ifdef NETEQ_WIDEBAND
- }
- else if (inst->fs == 16000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_16KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_16KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_16KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- }
- else if (inst->fs == 32000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_32KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_32KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_32KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- }
- else if (inst->fs == 48000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_48KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_48KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_48KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC;
-#endif
- }
- else
- {
- /* Unsupported sample rate (should not be possible) */
- return NETEQ_OTHER_ERROR;
- }
-
- /* Do overlap add between new vector and overlap */
- for (i = 0; i < inst->ExpandInst.w16_overlap; i++)
- {
- /* overlapVec[i] = WinMute * overlapVec[i] + WinUnMute * outData[i] */
- inst->ExpandInst.pw16_overlapVec[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- WEBRTC_SPL_MUL_16_16(
- inst->ExpandInst.pw16_overlapVec[i], w16_winMute) +
- WEBRTC_SPL_MUL_16_16(pw16_outData[i], w16_winUnMute)
- + 16384, 15); /* shift with proper rounding */
-
- w16_winMute += w16_winMuteInc; /* decrease mute factor (inc<0) */
- w16_winUnMute += w16_winUnMuteInc; /* increase unmute factor (inc>0) */
-
- }
-
- /*
- * Shift the contents of the outData buffer by overlap samples, since we
- * already used these first samples in the overlapVec above
- */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_outData+inst->ExpandInst.w16_overlap, len);
-
- }
- else
- {
- /* This is a subsequent CNG call; no special overlap needed */
-
- /* Generate len samples */
- if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData, (int16_t) len, 0) < 0)
- {
- /* error returned */
- return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
- }
- }
-
- return 0;
-
-}
-
-#endif /* NETEQ_CNG_CODEC */
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.c
deleted file mode 100644
index bb34f5e58d3..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.c
+++ /dev/null
@@ -1,782 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of the codec database.
- */
-
-#include "codec_db.h"
-
-#include <string.h> /* to define NULL */
-
-#include "signal_processing_library.h"
-
-#include "neteq_error_codes.h"
-
-/*
- * Resets the codec database.
- */
-
-int WebRtcNetEQ_DbReset(CodecDbInst_t *inst)
-{
- int i;
-
- WebRtcSpl_MemSetW16((int16_t*) inst, 0,
- sizeof(CodecDbInst_t) / sizeof(int16_t));
-
- for (i = 0; i < NUM_TOTAL_CODECS; i++)
- {
- inst->position[i] = -1;
- }
-
- for (i = 0; i < NUM_CODECS; i++)
- {
- inst->payloadType[i] = -1;
- }
-
- for (i = 0; i < NUM_CNG_CODECS; i++)
- {
- inst->CNGpayloadType[i] = -1;
- }
-
- return 0;
-}
-
-/*
- * Adds a new codec to the database.
- */
-
-int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
- int16_t payloadType, FuncDecode funcDecode,
- FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
- FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
- FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
- FuncUpdBWEst funcUpdBWEst, FuncDurationEst funcDurationEst,
- FuncGetErrorCode funcGetErrorCode, void* codec_state,
- uint16_t codec_fs)
-{
-
- int temp;
- int insertCNGcodec = 0, overwriteCNGcodec = 0, CNGpos = -1;
-
-#ifndef NETEQ_RED_CODEC
- if (codec == kDecoderRED)
- {
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
-#endif
- if (((int) codec <= (int) kDecoderReservedStart) || ((int) codec
- >= (int) kDecoderReservedEnd))
- {
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
-
- if ((codec_fs != 8000)
-#ifdef NETEQ_WIDEBAND
- &&(codec_fs!=16000)
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- &&(codec_fs!=32000)
-#endif
-#if defined(NETEQ_48KHZ_WIDEBAND) || defined(NETEQ_OPUS_CODEC)
- &&(codec_fs!=48000)
-#endif
- )
- {
- return CODEC_DB_UNSUPPORTED_FS;
- }
-
- /* Ensure that the codec type is supported */
- switch (codec)
- {
-#ifdef NETEQ_PCM16B_CODEC
- case kDecoderPCM16B :
- case kDecoderPCM16B_2ch :
-#endif
-#ifdef NETEQ_G711_CODEC
- case kDecoderPCMu :
- case kDecoderPCMa :
- case kDecoderPCMu_2ch :
- case kDecoderPCMa_2ch :
-#endif
-#ifdef NETEQ_ILBC_CODEC
- case kDecoderILBC :
-#endif
-#ifdef NETEQ_ISAC_CODEC
- case kDecoderISAC :
-#endif
-#ifdef NETEQ_ISAC_SWB_CODEC
- case kDecoderISACswb :
-#endif
-#ifdef NETEQ_ISAC_FB_CODEC
- case kDecoderISACfb :
-#endif
-#ifdef NETEQ_OPUS_CODEC
- case kDecoderOpus :
-#endif
-#ifdef NETEQ_G722_CODEC
- case kDecoderG722 :
- case kDecoderG722_2ch :
-#endif
-#ifdef NETEQ_WIDEBAND
- case kDecoderPCM16Bwb :
- case kDecoderPCM16Bwb_2ch :
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- case kDecoderPCM16Bswb32kHz :
- case kDecoderPCM16Bswb32kHz_2ch :
-#endif
-#ifdef NETEQ_CNG_CODEC
- case kDecoderCNG :
-#endif
-#ifdef NETEQ_ATEVENT_DECODE
- case kDecoderAVT :
-#endif
-#ifdef NETEQ_RED_CODEC
- case kDecoderRED :
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- case kDecoderPCM16Bswb48kHz :
-#endif
-#ifdef NETEQ_ARBITRARY_CODEC
- case kDecoderArbitrary:
-#endif
-#ifdef NETEQ_G729_CODEC
- case kDecoderG729:
-#endif
-#ifdef NETEQ_G729_1_CODEC
- case kDecoderG729_1 :
-#endif
-#ifdef NETEQ_G726_CODEC
- case kDecoderG726_16 :
- case kDecoderG726_24 :
- case kDecoderG726_32 :
- case kDecoderG726_40 :
-#endif
-#ifdef NETEQ_G722_1_CODEC
- case kDecoderG722_1_16 :
- case kDecoderG722_1_24 :
- case kDecoderG722_1_32 :
-#endif
-#ifdef NETEQ_G722_1C_CODEC
- case kDecoderG722_1C_24 :
- case kDecoderG722_1C_32 :
- case kDecoderG722_1C_48 :
-#endif
-#ifdef NETEQ_SPEEX_CODEC
- case kDecoderSPEEX_8 :
- case kDecoderSPEEX_16 :
-#endif
-#ifdef NETEQ_CELT_CODEC
- case kDecoderCELT_32 :
- case kDecoderCELT_32_2ch :
-#endif
-#ifdef NETEQ_GSMFR_CODEC
- case kDecoderGSMFR :
-#endif
-#ifdef NETEQ_AMR_CODEC
- case kDecoderAMR :
-#endif
-#ifdef NETEQ_AMRWB_CODEC
- case kDecoderAMRWB :
-#endif
- {
- /* If we end up here, the inserted codec is supported => Do nothing */
- break;
- }
- default:
- {
- /* If we get to this point, the inserted codec is not supported */
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
- }
-
- /* Check to see if payload type is taken */
- if (WebRtcNetEQ_DbGetCodec(inst, payloadType) > 0)
- {
- return CODEC_DB_PAYLOAD_TAKEN;
- }
-
- /* Special case for CNG codecs */
- if (codec == kDecoderCNG)
- {
- /* check if this is first CNG codec to be registered */
- if (WebRtcNetEQ_DbGetPayload(inst, codec) == CODEC_DB_NOT_EXIST2)
- {
- /* no other CNG codec found */
- insertCNGcodec = 1;
- }
-
- /* find the appropriate insert position in CNG payload vector */
- switch (codec_fs)
- {
- case 8000:
- CNGpos = 0;
- /*
- * The 8 kHz CNG payload type is the one associated with the regular codec DB
- * should override any other setting.
- * Overwrite if this isn't the first CNG
- */
- overwriteCNGcodec = !insertCNGcodec;
- break;
-#ifdef NETEQ_WIDEBAND
- case 16000:
- CNGpos = 1;
- break;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- case 32000:
- CNGpos = 2;
- break;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- case 48000:
- CNGpos = 3;
- break;
-#endif
- default:
- /* If we get to this point, the inserted codec is not supported */
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
-
- /* insert CNG payload type */
- inst->CNGpayloadType[CNGpos] = payloadType;
-
- }
-
- if ((codec != kDecoderCNG) || (insertCNGcodec == 1) || (overwriteCNGcodec == 1))
- {
- /* Check if we have reached the maximum numbers of simultaneous codecs */
- if (inst->nrOfCodecs == NUM_CODECS) return CODEC_DB_FULL;
-
- /* Check that codec has not already been initialized to DB =>
- remove it and reinitialize according to new spec */
- if ((inst->position[codec] != -1) && (overwriteCNGcodec != 1))
- { /* if registering multiple CNG codecs, don't remove, just overwrite */
- WebRtcNetEQ_DbRemove(inst, codec);
- }
-
- if (overwriteCNGcodec == 1)
- {
- temp = inst->position[codec];
- }
- else
- {
- temp = inst->nrOfCodecs; /* Store this codecs position */
- inst->position[codec] = temp;
- inst->nrOfCodecs++;
- }
-
- inst->payloadType[temp] = payloadType;
-
- /* Copy to database */
- inst->codec_state[temp] = codec_state;
- inst->funcDecode[temp] = funcDecode;
- inst->funcDecodeRCU[temp] = funcDecodeRCU;
- inst->funcAddLatePkt[temp] = funcAddLatePkt;
- inst->funcDecodeInit[temp] = funcDecodeInit;
- inst->funcDecodePLC[temp] = funcDecodePLC;
- inst->funcGetMDinfo[temp] = funcGetMDinfo;
- inst->funcGetPitch[temp] = funcGetPitch;
- inst->funcUpdBWEst[temp] = funcUpdBWEst;
- inst->funcDurationEst[temp] = funcDurationEst;
- inst->funcGetErrorCode[temp] = funcGetErrorCode;
- inst->codec_fs[temp] = codec_fs;
-
- }
-
- return 0;
-}
-
-/*
- * Removes a codec from the database.
- */
-
-int WebRtcNetEQ_DbRemove(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec)
-{
- int i;
- int pos = -1;
-
-#ifndef NETEQ_RED_CODEC
- if (codec == kDecoderRED)
- {
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
-#endif
- if (((int) codec <= (int) kDecoderReservedStart) || ((int) codec
- >= (int) kDecoderReservedEnd))
- {
- return CODEC_DB_UNSUPPORTED_CODEC;
- }
-
- pos = inst->position[codec];
- if (pos == -1)
- {
- return CODEC_DB_NOT_EXIST4;
- }
- else
- {
- /* Remove this codec */
- inst->position[codec] = -1;
- for (i = pos; i < (inst->nrOfCodecs - 1); i++)
- {
- inst->payloadType[i] = inst->payloadType[i + 1];
- inst->codec_state[i] = inst->codec_state[i + 1];
- inst->funcDecode[i] = inst->funcDecode[i + 1];
- inst->funcDecodeRCU[i] = inst->funcDecodeRCU[i + 1];
- inst->funcAddLatePkt[i] = inst->funcAddLatePkt[i + 1];
- inst->funcDecodeInit[i] = inst->funcDecodeInit[i + 1];
- inst->funcDecodePLC[i] = inst->funcDecodePLC[i + 1];
- inst->funcGetMDinfo[i] = inst->funcGetMDinfo[i + 1];
- inst->funcGetPitch[i] = inst->funcGetPitch[i + 1];
- inst->funcDurationEst[i] = inst->funcDurationEst[i + 1];
- inst->funcUpdBWEst[i] = inst->funcUpdBWEst[i + 1];
- inst->funcGetErrorCode[i] = inst->funcGetErrorCode[i + 1];
- inst->codec_fs[i] = inst->codec_fs[i + 1];
- }
- inst->payloadType[i] = -1;
- inst->codec_state[i] = NULL;
- inst->funcDecode[i] = NULL;
- inst->funcDecodeRCU[i] = NULL;
- inst->funcAddLatePkt[i] = NULL;
- inst->funcDecodeInit[i] = NULL;
- inst->funcDecodePLC[i] = NULL;
- inst->funcGetMDinfo[i] = NULL;
- inst->funcGetPitch[i] = NULL;
- inst->funcDurationEst[i] = NULL;
- inst->funcUpdBWEst[i] = NULL;
- inst->funcGetErrorCode[i] = NULL;
- inst->codec_fs[i] = 0;
- /* Move down all the codecs above this one */
- for (i = 0; i < NUM_TOTAL_CODECS; i++)
- {
- if (inst->position[i] >= pos)
- {
- inst->position[i] = inst->position[i] - 1;
- }
- }
- inst->nrOfCodecs--;
-
- if (codec == kDecoderCNG)
- {
- /* also remove all registered CNG payload types */
- for (i = 0; i < NUM_CNG_CODECS; i++)
- {
- inst->CNGpayloadType[i] = -1;
- }
- }
- }
- return 0;
-}
-
-/*
- * Get the decoder function pointers for a codec.
- */
-
-int WebRtcNetEQ_DbGetPtrs(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
- CodecFuncInst_t *ptr_inst)
-{
-
- int pos = inst->position[codec];
- if ((codec <= kDecoderReservedStart) || (codec >= kDecoderReservedEnd) || (codec
- > NUM_TOTAL_CODECS))
- {
- /* ERROR */
- pos = -1;
- }
- if (pos >= 0)
- {
- ptr_inst->codec_state = inst->codec_state[pos];
- ptr_inst->funcAddLatePkt = inst->funcAddLatePkt[pos];
- ptr_inst->funcDecode = inst->funcDecode[pos];
- ptr_inst->funcDecodeRCU = inst->funcDecodeRCU[pos];
- ptr_inst->funcDecodeInit = inst->funcDecodeInit[pos];
- ptr_inst->funcDecodePLC = inst->funcDecodePLC[pos];
- ptr_inst->funcGetMDinfo = inst->funcGetMDinfo[pos];
- ptr_inst->funcUpdBWEst = inst->funcUpdBWEst[pos];
- ptr_inst->funcGetErrorCode = inst->funcGetErrorCode[pos];
- ptr_inst->codec_fs = inst->codec_fs[pos];
- return 0;
- }
- else
- {
- WebRtcSpl_MemSetW16((int16_t*) ptr_inst, 0,
- sizeof(CodecFuncInst_t) / sizeof(int16_t));
- return CODEC_DB_NOT_EXIST1;
- }
-}
-
-/*
- * Returns payload number given a codec identifier.
- */
-
-int WebRtcNetEQ_DbGetPayload(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codecID)
-{
- if (inst->position[codecID] == -1)
- return CODEC_DB_NOT_EXIST2;
- else
- return (inst->payloadType[inst->position[codecID]]);
-
-}
-
-/*
- * Returns codec identifier given a payload number.
- * Returns -1 if the payload type does not exist.
- */
-
-int WebRtcNetEQ_DbGetCodec(const CodecDbInst_t *inst, int payloadType)
-{
- int i, pos;
-
- for (i = 0; i < NUM_TOTAL_CODECS; i++)
- {
- pos = inst->position[i];
- if (pos != -1)
- {
- if (inst->payloadType[pos] == payloadType) return i;
- }
- }
-
- /* did not find payload type */
- /* check if it's a CNG codec */
- if (WebRtcNetEQ_DbIsCNGPayload(inst, payloadType))
- {
- return kDecoderCNG;
- }
-
- /* found no match */
- return CODEC_DB_NOT_EXIST3;
-}
-
-/*
- * Extracts the Payload Split information of the codec with the specified payloadType.
- */
-
-int WebRtcNetEQ_DbGetSplitInfo(SplitInfo_t *inst, enum WebRtcNetEQDecoder codecID,
- int codedsize)
-{
-
- switch (codecID)
- {
-#ifdef NETEQ_ISAC_CODEC
- case kDecoderISAC:
-#endif
-#ifdef NETEQ_ISAC_SWB_CODEC
- case kDecoderISACswb:
-#endif
-#ifdef NETEQ_ISAC_FB_CODEC
- case kDecoderISACfb:
-#endif
-#ifdef NETEQ_OPUS_CODEC
- case kDecoderOpus:
-#endif
-#ifdef NETEQ_ARBITRARY_CODEC
- case kDecoderArbitrary:
-#endif
-#ifdef NETEQ_AMR_CODEC
- case kDecoderAMR:
-#endif
-#ifdef NETEQ_AMRWB_CODEC
- case kDecoderAMRWB:
-#endif
-#ifdef NETEQ_G726_CODEC
- /* Treat G726 as non-splittable to simplify the implementation */
- case kDecoderG726_16:
- case kDecoderG726_24:
- case kDecoderG726_32:
- case kDecoderG726_40:
-#endif
-#ifdef NETEQ_SPEEX_CODEC
- case kDecoderSPEEX_8:
- case kDecoderSPEEX_16:
-#endif
-#ifdef NETEQ_CELT_CODEC
- case kDecoderCELT_32 :
- case kDecoderCELT_32_2ch :
-#endif
-#ifdef NETEQ_G729_1_CODEC
- case kDecoderG729_1:
-#endif
- {
- /* These codecs' payloads are not splittable */
- inst->deltaBytes = NO_SPLIT;
- return 0;
- }
-
- /*
- * Sample based coders are a special case.
- * In this case, deltaTime signals the number of bytes per timestamp unit times 2
- * in log2 domain.
- */
-#if (defined NETEQ_G711_CODEC)
- case kDecoderPCMu:
- case kDecoderPCMa:
- case kDecoderPCMu_2ch:
- case kDecoderPCMa_2ch:
- {
- inst->deltaBytes = -12;
- inst->deltaTime = 1;
- return 0;
- }
-#endif
-#if (defined NETEQ_G722_CODEC)
- case kDecoderG722:
- case kDecoderG722_2ch:
- {
- inst->deltaBytes = -14;
- inst->deltaTime = 0;
- return 0;
- }
-#endif
-#if (defined NETEQ_PCM16B_CODEC)
- case kDecoderPCM16B:
- case kDecoderPCM16B_2ch:
- {
- inst->deltaBytes = -12;
- inst->deltaTime = 2;
- return 0;
- }
-#endif
-#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_WIDEBAND))
- case kDecoderPCM16Bwb:
- case kDecoderPCM16Bwb_2ch:
- {
- inst->deltaBytes = -14;
- inst->deltaTime = 2;
- return 0;
- }
-#endif
-#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_32KHZ_WIDEBAND))
- case kDecoderPCM16Bswb32kHz:
- case kDecoderPCM16Bswb32kHz_2ch:
- {
- inst->deltaBytes = -18;
- inst->deltaTime = 2;
- return 0;
- }
-#endif
-#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_48KHZ_WIDEBAND))
- case kDecoderPCM16Bswb48kHz:
- {
- inst->deltaBytes = -22;
- inst->deltaTime = 2;
- return 0;
- }
-#endif
-
- /* Splittable payloads */
-#ifdef NETEQ_G722_1_CODEC
- case kDecoderG722_1_16:
- {
- inst->deltaBytes = 40;
- inst->deltaTime = 320;
- return 0;
- }
- case kDecoderG722_1_24:
- {
- inst->deltaBytes = 60;
- inst->deltaTime = 320;
- return 0;
- }
- case kDecoderG722_1_32:
- {
- inst->deltaBytes = 80;
- inst->deltaTime = 320;
- return 0;
- }
-#endif
-#ifdef NETEQ_G722_1C_CODEC
- case kDecoderG722_1C_24:
- {
- inst->deltaBytes = 60;
- inst->deltaTime = 640;
- return 0;
- }
- case kDecoderG722_1C_32:
- {
- inst->deltaBytes = 80;
- inst->deltaTime = 640;
- return 0;
- }
- case kDecoderG722_1C_48:
- {
- inst->deltaBytes = 120;
- inst->deltaTime = 640;
- return 0;
- }
-#endif
-#ifdef NETEQ_G729_CODEC
- case kDecoderG729:
- {
- inst->deltaBytes = 10;
- inst->deltaTime = 80;
- return 0;
- }
-#endif
-#ifdef NETEQ_ILBC_CODEC
- case kDecoderILBC:
- {
- /* Check for splitting of iLBC packets.
- * If payload size is a multiple of 50 bytes it should be split into 30ms frames.
- * If payload size is a multiple of 38 bytes it should be split into 20ms frames.
- * Least common multiplier between 38 and 50 is 950, so the payload size must be less than
- * 950 bytes in order to resolve the frames unambiguously.
- * Currently max 12 frames in one bundle.
- */
- switch (codedsize)
- {
- case 50:
- case 100:
- case 150:
- case 200:
- case 250:
- case 300:
- case 350:
- case 400:
- case 450:
- case 500:
- case 550:
- case 600:
- {
- inst->deltaBytes = 50;
- inst->deltaTime = 240;
- break;
- }
- case 38:
- case 76:
- case 114:
- case 152:
- case 190:
- case 228:
- case 266:
- case 304:
- case 342:
- case 380:
- case 418:
- case 456:
- {
- inst->deltaBytes = 38;
- inst->deltaTime = 160;
- break;
- }
- default:
- {
- return AMBIGUOUS_ILBC_FRAME_SIZE; /* Something not supported... */
- }
- }
- return 0;
- }
-#endif
-#ifdef NETEQ_GSMFR_CODEC
- case kDecoderGSMFR:
- {
- inst->deltaBytes = 33;
- inst->deltaTime = 160;
- return 0;
- }
-#endif
- default:
- { /*Unknown codec */
- inst->deltaBytes = NO_SPLIT;
- return CODEC_DB_UNKNOWN_CODEC;
- }
- } /* end of switch */
-}
-
-/*
- * Returns 1 if codec is multiple description, 0 otherwise.
- * NOTE: This function is a stub, since there currently are no MD codecs.
- */
-int WebRtcNetEQ_DbIsMDCodec(enum WebRtcNetEQDecoder codecID)
-{
- if (0) /* Add test for MD codecs here */
- return 1;
- else
- return 0;
-}
-
-/*
- * Returns 1 if payload type is registered as a CNG codec, 0 otherwise
- */
-int WebRtcNetEQ_DbIsCNGPayload(const CodecDbInst_t *inst, int payloadType)
-{
-#ifdef NETEQ_CNG_CODEC
- int i;
-
- for(i=0; i<NUM_CNG_CODECS; i++)
- {
- if( (inst->CNGpayloadType[i] != -1) && (inst->CNGpayloadType[i] == payloadType) )
- {
- return 1;
- }
- }
-#endif
-
- return 0;
-
-}
-
-/*
- * Return the sample rate for the codec with the given payload type, 0 if error
- */
-uint16_t WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType)
-{
- int i;
- CodecFuncInst_t codecInst;
-
- /* Sanity */
- if (inst == NULL)
- {
- /* return 0 Hz */
- return 0;
- }
-
- /* Check among CNG payloads */
- for (i = 0; i < NUM_CNG_CODECS; i++)
- {
- if ((inst->CNGpayloadType[i] != -1) && (inst->CNGpayloadType[i] == payloadType))
- {
- switch (i)
- {
-#ifdef NETEQ_WIDEBAND
- case 1:
- return 16000;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- case 2:
- return 32000;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- case 3:
- return 48000;
-#endif
- default:
- return 8000;
- }
- }
- }
-
- /* Not a CNG payload, check the other payloads */
- i = WebRtcNetEQ_DbGetCodec(inst, payloadType);
- if (i >= 0)
- {
- if (WebRtcNetEQ_DbGetPtrs(inst, (enum WebRtcNetEQDecoder) i, &codecInst) != 0)
- {
- /* Unexpected error, return 0 Hz */
- return 0;
- }
- return codecInst.codec_fs;
- }
-
- /* If we end up here, we got an error, return 0 Hz */
- return 0;
-
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.h
deleted file mode 100644
index cc4b48e6f26..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db.h
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Interface for the codec database.
- */
-
-#ifndef CODEC_DB_H
-#define CODEC_DB_H
-
-#include "typedefs.h"
-
-#include "webrtc_neteq.h"
-#include "codec_db_defines.h"
-#include "neteq_defines.h"
-
-#if defined(NETEQ_48KHZ_WIDEBAND)
- #define NUM_CNG_CODECS 4
-#elif defined(NETEQ_32KHZ_WIDEBAND)
- #define NUM_CNG_CODECS 3
-#elif defined(NETEQ_WIDEBAND)
- #define NUM_CNG_CODECS 2
-#else
- #define NUM_CNG_CODECS 1
-#endif
-
-typedef struct
-{
-
- int16_t position[NUM_TOTAL_CODECS];
- int16_t nrOfCodecs;
-
- int16_t payloadType[NUM_CODECS];
- FuncDecode funcDecode[NUM_CODECS];
- FuncDecode funcDecodeRCU[NUM_CODECS];
- FuncDecodePLC funcDecodePLC[NUM_CODECS];
- FuncDecodeInit funcDecodeInit[NUM_CODECS];
- FuncAddLatePkt funcAddLatePkt[NUM_CODECS];
- FuncGetMDinfo funcGetMDinfo[NUM_CODECS];
- FuncGetPitchInfo funcGetPitch[NUM_CODECS];
- FuncUpdBWEst funcUpdBWEst[NUM_CODECS];
- FuncDurationEst funcDurationEst[NUM_CODECS];
- FuncGetErrorCode funcGetErrorCode[NUM_CODECS];
- void * codec_state[NUM_CODECS];
- uint16_t codec_fs[NUM_CODECS];
- int16_t CNGpayloadType[NUM_CNG_CODECS];
-
-} CodecDbInst_t;
-
-#define NO_SPLIT -1 /* codec payload cannot be split */
-
-typedef struct
-{
- int16_t deltaBytes;
- int16_t deltaTime;
-} SplitInfo_t;
-
-/*
- * Resets the codec database.
- */
-int WebRtcNetEQ_DbReset(CodecDbInst_t *inst);
-
-/*
- * Adds a new codec to the database.
- */
-int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
- int16_t payloadType, FuncDecode funcDecode,
- FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
- FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
- FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
- FuncUpdBWEst funcUpdBWEst, FuncDurationEst funcDurationEst,
- FuncGetErrorCode funcGetErrorCode, void* codec_state,
- uint16_t codec_fs);
-
-/*
- * Removes a codec from the database.
- */
-int WebRtcNetEQ_DbRemove(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec);
-
-/*
- * Get the decoder function pointers for a codec.
- */
-int WebRtcNetEQ_DbGetPtrs(CodecDbInst_t *inst, enum WebRtcNetEQDecoder,
- CodecFuncInst_t *ptr_inst);
-
-/*
- * Returns payload number given a codec identifier.
- */
-
-int WebRtcNetEQ_DbGetPayload(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codecID);
-
-/*
- * Returns codec identifier given a payload number.
- */
-
-int WebRtcNetEQ_DbGetCodec(const CodecDbInst_t *inst, int payloadType);
-
-/*
- * Extracts the Payload Split information of the codec with the specified payloadType.
- */
-
-int WebRtcNetEQ_DbGetSplitInfo(SplitInfo_t *inst, enum WebRtcNetEQDecoder codecID,
- int codedsize);
-
-/*
- * Returns 1 if codec is multiple description type, 0 otherwise.
- */
-int WebRtcNetEQ_DbIsMDCodec(enum WebRtcNetEQDecoder codecID);
-
-/*
- * Returns 1 if payload type is registered as a CNG codec, 0 otherwise.
- */
-int WebRtcNetEQ_DbIsCNGPayload(const CodecDbInst_t *inst, int payloadType);
-
-/*
- * Return the sample rate for the codec with the given payload type, 0 if error.
- */
-uint16_t WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType);
-
-#endif
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db_defines.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db_defines.h
deleted file mode 100644
index d97306a333c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/codec_db_defines.h
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Some definitions related to the codec database.
- */
-
-#ifndef CODEC_DB_DEFINES_H
-#define CODEC_DB_DEFINES_H
-
-#include "typedefs.h"
-
-#define NUM_CODECS 47 /* probably too large with the limited set of supported codecs*/
-#define NUM_TOTAL_CODECS kDecoderReservedEnd
-
-/*
- * Pointer to decoder function.
- */
-typedef int16_t (*FuncDecode)(void* state, int16_t* encoded, int16_t len,
- int16_t* decoded, int16_t* speechType);
-
-/*
- * Pointer to PLC function.
- */
-typedef int16_t (*FuncDecodePLC)(void* state, int16_t* decodec,
- int16_t frames);
-
-/*
- * Pointer to decoder init function.
- */
-typedef int16_t (*FuncDecodeInit)(void* state);
-
-/*
- * Pointer to add late packet function.
- */
-typedef int16_t
- (*FuncAddLatePkt)(void* state, int16_t* encoded, int16_t len);
-
-/*
- * Pointer to get MD infofunction.
- */
-typedef int16_t (*FuncGetMDinfo)(void* state);
-
-/*
- * Pointer to pitch info function.
- * Return 0 for unvoiced, -1 if pitch not availiable.
- */
-typedef int16_t (*FuncGetPitchInfo)(void* state, int16_t* encoded,
- int16_t* length);
-
-/*
- * Pointer to the update bandwidth estimate function
- */
-typedef int16_t (*FuncUpdBWEst)(void* state, const uint16_t *encoded,
- int32_t packet_size,
- uint16_t rtp_seq_number, uint32_t send_ts,
- uint32_t arr_ts);
-
-/*
- * Pointer to the frame size estimate function.
- * Returns the estimated number of samples in the packet.
- */
-typedef int (*FuncDurationEst)(void* state, const uint8_t* payload,
- int payload_length_bytes);
-
-/*
- * Pointer to error code function
- */
-typedef int16_t (*FuncGetErrorCode)(void* state);
-
-typedef struct CodecFuncInst_t_
-{
-
- FuncDecode funcDecode;
- FuncDecode funcDecodeRCU;
- FuncDecodePLC funcDecodePLC;
- FuncDecodeInit funcDecodeInit;
- FuncAddLatePkt funcAddLatePkt;
- FuncGetMDinfo funcGetMDinfo;
- FuncUpdBWEst funcUpdBWEst; /* Currently in use for the ISAC family (without LC) only*/
- FuncDurationEst funcDurationEst;
- FuncGetErrorCode funcGetErrorCode;
- void * codec_state;
- uint16_t codec_fs;
- uint32_t timeStamp;
-
-} CodecFuncInst_t;
-
-#endif
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc
index 360767af193..31bb40c9275 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.cc
@@ -8,15 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/comfort_noise.h"
+#include "webrtc/modules/audio_coding/neteq/comfort_noise.h"
#include <assert.h>
#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h
index 7e7c294ff0e..d4655962456 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise.h
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_COMFORT_NOISE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_COMFORT_NOISE_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_COMFORT_NOISE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_COMFORT_NOISE_H_
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -70,4 +70,4 @@ class ComfortNoise {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_COMFORT_NOISE_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_COMFORT_NOISE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise_unittest.cc
index 0e849717125..6a1bbe0d143 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/comfort_noise_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/comfort_noise_unittest.cc
@@ -10,11 +10,11 @@
// Unit tests for ComfortNoise class.
-#include "webrtc/modules/audio_coding/neteq4/comfort_noise.h"
+#include "webrtc/modules/audio_coding/neteq/comfort_noise.h"
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/correlator.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/correlator.c
deleted file mode 100644
index 0a4404a432b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/correlator.c
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_corrVec 62 0 61
- int16_t pw16_data_ds 124 0 123
- int32_t pw32_corr 2*54 124 231
-
- Total: 232
- */
-
-#define SCRATCH_pw16_corrVec 0
-#define SCRATCH_pw16_data_ds 0
-#define SCRATCH_pw32_corr 124
-
-#define NETEQ_CORRELATOR_DSVECLEN 124 /* 124 = 60 + 10 + 54 */
-
-int16_t WebRtcNetEQ_Correlator(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_data,
- int16_t w16_dataLen,
- int16_t *pw16_corrOut,
- int16_t *pw16_corrScale)
-{
- int16_t w16_corrLen = 60;
-#ifdef SCRATCH
- int16_t *pw16_data_ds = pw16_scratchPtr + SCRATCH_pw16_corrVec;
- int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_pw32_corr);
- /* int16_t *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;*/
-#else
- int16_t pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN];
- int32_t pw32_corr[54];
- /* int16_t pw16_corrVec[4+54+4];*/
-#endif
- /* int16_t *pw16_corr=&pw16_corrVec[4];*/
- int16_t w16_maxVal;
- int32_t w32_maxVal;
- int16_t w16_normVal;
- int16_t w16_normVal2;
- /* int16_t w16_corrUpsLen;*/
- int16_t *pw16_B = NULL;
- int16_t w16_Blen = 0;
- int16_t w16_factor = 0;
-
- /* Set constants depending on frequency used */
- if (inst->fs == 8000)
- {
- w16_Blen = 3;
- w16_factor = 2;
- pw16_B = (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl;
-#ifdef NETEQ_WIDEBAND
- }
- else if (inst->fs==16000)
- {
- w16_Blen = 5;
- w16_factor = 4;
- pw16_B = (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- }
- else if (inst->fs==32000)
- {
- w16_Blen = 7;
- w16_factor = 8;
- pw16_B = (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- }
- else /* if inst->fs==48000 */
- {
- w16_Blen = 7;
- w16_factor = 12;
- pw16_B = (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl;
-#endif
- }
-
- /* Downsample data in order to work on a 4 kHz sampled signal */
- WebRtcSpl_DownsampleFast(
- pw16_data + w16_dataLen - (NETEQ_CORRELATOR_DSVECLEN * w16_factor),
- (int16_t) (NETEQ_CORRELATOR_DSVECLEN * w16_factor), pw16_data_ds,
- NETEQ_CORRELATOR_DSVECLEN, pw16_B, w16_Blen, w16_factor, (int16_t) 0);
-
- /* Normalize downsampled vector to using entire 16 bit */
- w16_maxVal = WebRtcSpl_MaxAbsValueW16(pw16_data_ds, 124);
- w16_normVal = 16 - WebRtcSpl_NormW32((int32_t) w16_maxVal);
- WebRtcSpl_VectorBitShiftW16(pw16_data_ds, NETEQ_CORRELATOR_DSVECLEN, pw16_data_ds,
- w16_normVal);
-
- /* Correlate from lag 10 to lag 60 (20..120 in NB and 40..240 in WB) */
-
- WebRtcNetEQ_CrossCorr(
- pw32_corr, &pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN - w16_corrLen],
- &pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN - w16_corrLen - 10], 60, 54,
- 6 /*maxValue... shifts*/, -1);
-
- /*
- * Move data from w32 to w16 vector.
- * Normalize downsampled vector to using all 14 bits
- */
- w32_maxVal = WebRtcSpl_MaxAbsValueW32(pw32_corr, 54);
- w16_normVal2 = 18 - WebRtcSpl_NormW32(w32_maxVal);
- w16_normVal2 = WEBRTC_SPL_MAX(w16_normVal2, 0);
-
- WebRtcSpl_VectorBitShiftW32ToW16(pw16_corrOut, 54, pw32_corr, w16_normVal2);
-
- /* Total scale factor (right shifts) of correlation value */
- *pw16_corrScale = 2 * w16_normVal + 6 + w16_normVal2;
-
- return (50 + 1);
-}
-
-#undef SCRATCH_pw16_corrVec
-#undef SCRATCH_pw16_data_ds
-#undef SCRATCH_pw32_corr
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc
index 04b886a2e2e..5fb054c7850 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.cc
@@ -8,17 +8,17 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
#include <algorithm>
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/decision_logic_fax.h"
-#include "webrtc/modules/audio_coding/neteq4/decision_logic_normal.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic_fax.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic_normal.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -145,8 +145,8 @@ Operations DecisionLogic::GetDecision(const SyncBuffer& sync_buffer,
reset_decoder);
}
-void DecisionLogic::ExpandDecision(bool is_expand_decision) {
- if (is_expand_decision) {
+void DecisionLogic::ExpandDecision(Operations operation) {
+ if (operation == kExpand) {
num_consecutive_expands_++;
} else {
num_consecutive_expands_ = 0;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h
index aca5ca40559..672ce939d41 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic.h
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_H_
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/defines.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -92,7 +92,7 @@ class DecisionLogic {
// not. Note that this is necessary, since an expand decision can be changed
// to kNormal in NetEqImpl::GetDecision if there is still enough data in the
// sync buffer.
- void ExpandDecision(bool is_expand_decision);
+ virtual void ExpandDecision(Operations operation);
// Adds |value| to |sample_memory_|.
void AddSampleMemory(int32_t value) {
@@ -165,4 +165,4 @@ class DecisionLogic {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc
index 00c8bcf4a2b..08a4c4cb646 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.cc
@@ -8,14 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/decision_logic_fax.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic_fax.h"
#include <assert.h>
#include <algorithm>
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h
index 1ccd3524473..01a948fa429 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_fax.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_fax.h
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_FAX_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_FAX_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_FAX_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_FAX_H_
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -60,4 +60,4 @@ class DecisionLogicFax : public DecisionLogic {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_FAX_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_FAX_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
index 8688737c3ed..97a8843ae0a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.cc
@@ -8,18 +8,18 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/decision_logic_normal.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic_normal.h"
#include <assert.h>
#include <algorithm>
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
#include "webrtc/modules/interface/module_common_types.h"
namespace webrtc {
@@ -44,7 +44,7 @@ Operations DecisionLogicNormal::GetDecisionSpecialized(
uint32_t target_timestamp = sync_buffer.end_timestamp();
uint32_t available_timestamp = 0;
- int is_cng_packet = 0;
+ bool is_cng_packet = false;
if (packet_header) {
available_timestamp = packet_header->timestamp;
is_cng_packet =
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h
index 783b001fc3e..a339d160f29 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_normal.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_normal.h
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_NORMAL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_NORMAL_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_NORMAL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_NORMAL_H_
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -38,6 +38,10 @@ class DecisionLogicNormal : public DecisionLogic {
virtual ~DecisionLogicNormal() {}
protected:
+ static const int kAllowMergeWithoutExpandMs = 20; // 20 ms.
+ static const int kReinitAfterExpands = 100;
+ static const int kMaxWaitForPacket = 10;
+
// Returns the operation that should be done next. |sync_buffer| and |expand|
// are provided for reference. |decoder_frame_length| is the number of samples
// obtained from the last decoded frame. If there is a packet available, the
@@ -54,31 +58,28 @@ class DecisionLogicNormal : public DecisionLogic {
Modes prev_mode, bool play_dtmf,
bool* reset_decoder);
- private:
- static const int kAllowMergeWithoutExpandMs = 20; // 20 ms.
- static const int kReinitAfterExpands = 100;
- static const int kMaxWaitForPacket = 10;
+ // Returns the operation to do given that the expected packet is not
+ // available, but a packet further into the future is at hand.
+ virtual Operations FuturePacketAvailable(
+ const SyncBuffer& sync_buffer,
+ const Expand& expand,
+ int decoder_frame_length, Modes prev_mode,
+ uint32_t target_timestamp,
+ uint32_t available_timestamp,
+ bool play_dtmf);
- // Returns the operation given that the next available packet is a comfort
- // noise payload (RFC 3389 only, not codec-internal).
- Operations CngOperation(Modes prev_mode, uint32_t target_timestamp,
- uint32_t available_timestamp);
+ // Returns the operation to do given that the expected packet is available.
+ virtual Operations ExpectedPacketAvailable(Modes prev_mode, bool play_dtmf);
// Returns the operation given that no packets are available (except maybe
// a DTMF event, flagged by setting |play_dtmf| true).
- Operations NoPacket(bool play_dtmf);
+ virtual Operations NoPacket(bool play_dtmf);
- // Returns the operation to do given that the expected packet is available.
- Operations ExpectedPacketAvailable(Modes prev_mode, bool play_dtmf);
-
- // Returns the operation to do given that the expected packet is not
- // available, but a packet further into the future is at hand.
- Operations FuturePacketAvailable(const SyncBuffer& sync_buffer,
- const Expand& expand,
- int decoder_frame_length, Modes prev_mode,
- uint32_t target_timestamp,
- uint32_t available_timestamp,
- bool play_dtmf);
+ private:
+ // Returns the operation given that the next available packet is a comfort
+ // noise payload (RFC 3389 only, not codec-internal).
+ Operations CngOperation(Modes prev_mode, uint32_t target_timestamp,
+ uint32_t available_timestamp);
// Checks if enough time has elapsed since the last successful timescale
// operation was done (i.e., accelerate or preemptive expand).
@@ -103,4 +104,4 @@ class DecisionLogicNormal : public DecisionLogic {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECISION_LOGIC_NORMAL_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECISION_LOGIC_NORMAL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
index d596c0519a3..f9056a6cbae 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decision_logic_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decision_logic_unittest.cc
@@ -11,12 +11,12 @@
// Unit tests for DecisionLogic class and derived classes.
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
namespace webrtc {
@@ -24,7 +24,7 @@ TEST(DecisionLogic, CreateAndDestroy) {
int fs_hz = 8000;
int output_size_samples = fs_hz / 100; // Samples per 10 ms.
DecoderDatabase decoder_database;
- PacketBuffer packet_buffer(10, 1000);
+ PacketBuffer packet_buffer(10);
DelayPeakDetector delay_peak_detector;
DelayManager delay_manager(240, &delay_peak_detector);
BufferLevelFilter buffer_level_filter;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc
index 8d87519b25a..5049962b45b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.cc
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
#include <assert.h>
#include <utility> // pair
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h
index 9effd525ded..8a03f2123be 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database.h
@@ -8,15 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECODER_DATABASE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECODER_DATABASE_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECODER_DATABASE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECODER_DATABASE_H_
#include <map>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_types.h" // NULL
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -155,4 +155,4 @@ class DecoderDatabase {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DECODER_DATABASE_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DECODER_DATABASE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
index 76f5a099ecb..d0c6f5ae891 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/decoder_database_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/decoder_database_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
#include <assert.h>
#include <stdlib.h>
@@ -18,7 +18,7 @@
#include "gmock/gmock.h"
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h"
#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
@@ -68,11 +68,11 @@ TEST(DecoderDatabase, GetRtpPayloadType) {
db.GetRtpPayloadType(kDecoderISAC)); // iSAC is not registered.
}
-TEST(DecoderDatabase, DISABLED_ON_ANDROID(GetDecoder)) {
+TEST(DecoderDatabase, GetDecoder) {
DecoderDatabase db;
const uint8_t kPayloadType = 0;
EXPECT_EQ(DecoderDatabase::kOK,
- db.RegisterPayload(kPayloadType, kDecoderILBC));
+ db.RegisterPayload(kPayloadType, kDecoderPCM16B));
AudioDecoder* dec = db.GetDecoder(kPayloadType);
ASSERT_TRUE(dec != NULL);
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/defines.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/defines.h
index b6f9eb2bc14..33d1bd9c3f0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/defines.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/defines.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DEFINES_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DEFINES_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DEFINES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DEFINES_H_
namespace webrtc {
@@ -48,4 +48,4 @@ enum Modes {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DEFINES_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DEFINES_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_logging.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_logging.h
deleted file mode 100644
index 04b1c401528..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_logging.h
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Contains definitions for the delay logging functionality. Only used for debugging and
- * tracing purposes.
- */
-
-#ifndef DELAY_LOGGING_H
-#define DELAY_LOGGING_H
-
-#define NETEQ_DELAY_LOGGING_VERSION_STRING "2.0"
-
-#define NETEQ_DELAY_LOGGING_SIGNAL_RECIN 1
-#define NETEQ_DELAY_LOGGING_SIGNAL_FLUSH 2
-#define NETEQ_DELAY_LOGGING_SIGNAL_CLOCK 3
-#define NETEQ_DELAY_LOGGING_SIGNAL_EOF 4
-#define NETEQ_DELAY_LOGGING_SIGNAL_DECODE 5
-#define NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS 6
-#define NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO 7
-#define NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO 8
-#define NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO 9
-#define NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO 10
-#define NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF 11
-#define NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC 12
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc
index e80b9de5142..a935561eff4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
#include <assert.h>
#include <math.h>
@@ -16,7 +16,7 @@
#include <algorithm> // max, min
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/logging.h"
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h
index ed1e87b190d..96b5e19ebdc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager.h
@@ -8,15 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_MANAGER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_MANAGER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_MANAGER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_MANAGER_H_
#include <string.h> // Provide access to size_t.
#include <vector>
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -161,4 +161,4 @@ class DelayManager {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_MANAGER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_MANAGER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc
index 482a65c9a48..6f9733234d5 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_manager_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_manager_unittest.cc
@@ -10,13 +10,13 @@
// Unit tests for DelayManager class.
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
#include <math.h>
#include "gmock/gmock.h"
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc
index fd5b9c08f0e..5996d7d197a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
#include <algorithm> // max
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h
index dfdd2537d7d..8bf6aba8b55 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector.h
@@ -8,14 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_PEAK_DETECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_PEAK_DETECTOR_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_PEAK_DETECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_PEAK_DETECTOR_H_
#include <string.h> // size_t
#include <list>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
namespace webrtc {
@@ -73,4 +73,4 @@ class DelayPeakDetector {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DELAY_PEAK_DETECTOR_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DELAY_PEAK_DETECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc
index a3b48209cff..080309be014 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/delay_peak_detector_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/delay_peak_detector_unittest.cc
@@ -10,7 +10,7 @@
// Unit tests for DelayPeakDetector class.
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
#include "gtest/gtest.h"
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.c
deleted file mode 100644
index ea2fa87d501..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.c
+++ /dev/null
@@ -1,532 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains some DSP initialization functions and
- * constant table definitions.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "neteq_error_codes.h"
-
-/* Filter coefficients used when downsampling from the indicated
- sample rates (8, 16, 32, 48 kHz) to 4 kHz.
- Coefficients are in Q12. */
-
-/* {0.3, 0.4, 0.3} */
-const int16_t WebRtcNetEQ_kDownsample8kHzTbl[] = { 1229, 1638, 1229 };
-
-#ifdef NETEQ_WIDEBAND
-/* {0.15, 0.2, 0.3, 0.2, 0.15} */
-const int16_t WebRtcNetEQ_kDownsample16kHzTbl[] =
-{ 614, 819, 1229, 819, 614};
-#endif
-
-#ifdef NETEQ_32KHZ_WIDEBAND
-/* {0.1425, 0.1251, 0.1525, 0.1628, 0.1525, 0.1251, 0.1425} */
-const int16_t WebRtcNetEQ_kDownsample32kHzTbl[] =
-{ 584, 512, 625, 667, 625, 512, 584};
-#endif
-
-#ifdef NETEQ_48KHZ_WIDEBAND
-/* {0.2487, 0.0952, 0.1042, 0.1074, 0.1042, 0.0952, 0.2487} */
-const int16_t WebRtcNetEQ_kDownsample48kHzTbl[] =
-{ 1019, 390, 427, 440, 427, 390, 1019};
-#endif
-
-/* Constants used in expand function WebRtcNetEQ_Expand */
-
-/* Q12: -1.264421 + 4.8659148*x - 4.0092827*x^2 + 1.4100529*x^3 */
-const int16_t WebRtcNetEQ_kMixFractionFuncTbl[4] = { -5179, 19931, -16422, 5776 };
-
-/* Tabulated divisions to save complexity */
-/* 1049/{0, .., 6} */
-const int16_t WebRtcNetEQ_k1049div[7] = { 0, 1049, 524, 349, 262, 209, 174 };
-
-/* 2097/{0, .., 6} */
-const int16_t WebRtcNetEQ_k2097div[7] = { 0, 2097, 1048, 699, 524, 419, 349 };
-
-/* 5243/{0, .., 6} */
-const int16_t WebRtcNetEQ_k5243div[7] = { 0, 5243, 2621, 1747, 1310, 1048, 873 };
-
-#ifdef WEBRTC_NETEQ_40BITACC_TEST
-/*
- * Run NetEQ with simulated 40-bit accumulator to run bit-exact to a DSP
- * implementation where the main (spl and NetEQ) functions have been
- * 40-bit optimized. For testing purposes.
- */
-
-/****************************************************************************
- * WebRtcNetEQ_40BitAccCrossCorr(...)
- *
- * Calculates the Cross correlation between two sequences seq1 and seq2. Seq1
- * is fixed and seq2 slides as the pointer is increased with step
- *
- * Input:
- * - seq1 : First sequence (fixed throughout the correlation)
- * - seq2 : Second sequence (slided step_seq2 for each
- * new correlation)
- * - dimSeq : Number of samples to use in the cross correlation.
- * Should be no larger than 1024 to avoid overflow.
- * - dimCrossCorr : Number of CrossCorrelations to calculate (start
- * position for seq2 is updated for each new one)
- * - rShift : Number of right shifts to use
- * - step_seq2 : How many (positive or negative) steps the seq2
- * pointer should be updated for each new cross
- * correlation value
- *
- * Output:
- * - crossCorr : The cross correlation in Q-rShift
- */
-
-void WebRtcNetEQ_40BitAccCrossCorr(int32_t *crossCorr,
- int16_t *seq1,
- int16_t *seq2,
- int16_t dimSeq,
- int16_t dimCrossCorr,
- int16_t rShift,
- int16_t step_seq2)
-{
- int i, j;
- int16_t *seq1Ptr, *seq2Ptr;
- int64_t acc;
-
- for (i = 0; i < dimCrossCorr; i++)
- {
- /* Set the pointer to the static vector, set the pointer to
- the sliding vector and initialize crossCorr */
- seq1Ptr = seq1;
- seq2Ptr = seq2 + (step_seq2 * i);
- acc = 0;
-
- /* Perform the cross correlation */
- for (j = 0; j < dimSeq; j++)
- {
- acc += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
- seq1Ptr++;
- seq2Ptr++;
- }
-
- (*crossCorr) = (int32_t) (acc >> rShift);
- crossCorr++;
- }
-}
-
-/****************************************************************************
- * WebRtcNetEQ_40BitAccDotW16W16(...)
- *
- * Calculates the dot product between two vectors (int16_t)
- *
- * Input:
- * - vector1 : Vector 1
- * - vector2 : Vector 2
- * - len : Number of samples in vector
- * Should be no larger than 1024 to avoid overflow.
- * - scaling : The number of left shifts required to avoid overflow
- * in the dot product
- * Return value : The dot product
- */
-
-int32_t WebRtcNetEQ_40BitAccDotW16W16(int16_t *vector1,
- int16_t *vector2,
- int len,
- int scaling)
-{
- int32_t sum;
- int i;
- int64_t acc;
-
- acc = 0;
- for (i = 0; i < len; i++)
- {
- acc += WEBRTC_SPL_MUL_16_16(*vector1++, *vector2++);
- }
-
- sum = (int32_t) (acc >> scaling);
-
- return(sum);
-}
-
-#endif /* WEBRTC_NETEQ_40BITACC_TEST */
-
-/****************************************************************************
- * WebRtcNetEQ_DSPInit(...)
- *
- * Initializes DSP side of NetEQ.
- *
- * Input:
- * - inst : NetEq DSP instance
- * - fs : Initial sample rate (may change when decoding data)
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- * : non-zero - error
- */
-
-int WebRtcNetEQ_DSPInit(DSPInst_t *inst, uint16_t fs)
-{
-
- int res = 0;
- int16_t fs_mult;
-
- /* Pointers and values to save before clearing the instance */
-#ifdef NETEQ_CNG_CODEC
- void *savedPtr1 = inst->CNG_Codec_inst;
-#endif
- void *savedPtr2 = inst->pw16_readAddress;
- void *savedPtr3 = inst->pw16_writeAddress;
- void *savedPtr4 = inst->main_inst;
-#ifdef NETEQ_VAD
- void *savedVADptr = inst->VADInst.VADState;
- VADInitFunction savedVADinit = inst->VADInst.initFunction;
- VADSetmodeFunction savedVADsetmode = inst->VADInst.setmodeFunction;
- VADFunction savedVADfunc = inst->VADInst.VADFunction;
- int16_t savedVADEnabled = inst->VADInst.VADEnabled;
- int savedVADMode = inst->VADInst.VADMode;
-#endif /* NETEQ_VAD */
- DSPStats_t saveStats;
- int16_t saveMsPerCall = inst->millisecondsPerCall;
- enum BGNMode saveBgnMode = inst->BGNInst.bgnMode;
-#ifdef NETEQ_STEREO
- MasterSlaveInfo* saveMSinfo = inst->msInfo;
-#endif
-
- /* copy contents of statInst to avoid clearing */WEBRTC_SPL_MEMCPY_W16(&saveStats, &(inst->statInst),
- sizeof(DSPStats_t)/sizeof(int16_t));
-
- /* check that the sample rate is valid */
- if ((fs != 8000)
-#ifdef NETEQ_WIDEBAND
- &&(fs!=16000)
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- &&(fs!=32000)
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- &&(fs!=48000)
-#endif
- )
- {
- /* invalid rate */
- return (CODEC_DB_UNSUPPORTED_FS);
- }
-
- /* calcualte fs/8000 */
- fs_mult = WebRtcSpl_DivW32W16ResW16(fs, 8000);
-
- /* Set everything to zero since most variables should be zero at start */
- WebRtcSpl_MemSetW16((int16_t *) inst, 0, sizeof(DSPInst_t) / sizeof(int16_t));
-
- /* Restore saved pointers */
-#ifdef NETEQ_CNG_CODEC
- inst->CNG_Codec_inst = (CNG_dec_inst *)savedPtr1;
-#endif
- inst->pw16_readAddress = (int16_t *) savedPtr2;
- inst->pw16_writeAddress = (int16_t *) savedPtr3;
- inst->main_inst = savedPtr4;
-#ifdef NETEQ_VAD
- inst->VADInst.VADState = savedVADptr;
- inst->VADInst.initFunction = savedVADinit;
- inst->VADInst.setmodeFunction = savedVADsetmode;
- inst->VADInst.VADFunction = savedVADfunc;
- inst->VADInst.VADEnabled = savedVADEnabled;
- inst->VADInst.VADMode = savedVADMode;
-#endif /* NETEQ_VAD */
-
- /* Initialize main part */
- inst->fs = fs;
- inst->millisecondsPerCall = saveMsPerCall;
- inst->timestampsPerCall = inst->millisecondsPerCall * 8 * fs_mult;
- inst->ExpandInst.w16_overlap = 5 * fs_mult;
- inst->endPosition = 565 * fs_mult;
- inst->curPosition = inst->endPosition - inst->ExpandInst.w16_overlap;
- inst->w16_seedInc = 1;
- inst->uw16_seed = 777;
- inst->w16_muteFactor = 16384; /* 1.0 in Q14 */
- inst->w16_frameLen = 3 * inst->timestampsPerCall; /* Dummy initialize to 30ms */
-
- inst->w16_speechHistoryLen = 256 * fs_mult;
- inst->pw16_speechHistory = &inst->speechBuffer[inst->endPosition
- - inst->w16_speechHistoryLen];
- inst->ExpandInst.pw16_overlapVec = &(inst->pw16_speechHistory[inst->w16_speechHistoryLen
- - inst->ExpandInst.w16_overlap]);
-
- /* Reusage of memory in speechBuffer inside Expand */
- inst->ExpandInst.pw16_expVecs[0] = &inst->speechBuffer[0];
- inst->ExpandInst.pw16_expVecs[1] = &inst->speechBuffer[126 * fs_mult];
- inst->ExpandInst.pw16_arState = &inst->speechBuffer[2 * 126 * fs_mult];
- inst->ExpandInst.pw16_arFilter = &inst->speechBuffer[2 * 126 * fs_mult
- + UNVOICED_LPC_ORDER];
- /* Ends at 2*126*fs_mult+UNVOICED_LPC_ORDER+(UNVOICED_LPC_ORDER+1) */
-
- inst->ExpandInst.w16_expandMuteFactor = 16384; /* 1.0 in Q14 */
-
- /* Initialize BGN part */
- inst->BGNInst.pw16_filter[0] = 4096;
- inst->BGNInst.w16_scale = 20000;
- inst->BGNInst.w16_scaleShift = 24;
- inst->BGNInst.w32_energyUpdate = 500000;
- inst->BGNInst.w32_energyUpdateLow = 0;
- inst->BGNInst.w32_energy = 2500;
- inst->BGNInst.w16_initialized = 0;
- inst->BGNInst.bgnMode = saveBgnMode;
-
- /* Recreate statistics counters */WEBRTC_SPL_MEMCPY_W16(&(inst->statInst), &saveStats,
- sizeof(DSPStats_t)/sizeof(int16_t));
-
-#ifdef NETEQ_STEREO
- /* Write back the pointer. */
- inst->msInfo = saveMSinfo;
-#endif
-
-#ifdef NETEQ_CNG_CODEC
- if (inst->CNG_Codec_inst!=NULL)
- {
- /* initialize comfort noise generator */
- res |= WebRtcCng_InitDec(inst->CNG_Codec_inst);
- }
-#endif
-
-#ifdef NETEQ_VAD
- /* initialize PostDecode VAD instance
- (don't bother checking for NULL instance, this is done inside init function) */
- res |= WebRtcNetEQ_InitVAD(&inst->VADInst, fs);
-#endif /* NETEQ_VAD */
-
- return (res);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_AddressInit(...)
- *
- * Initializes the shared-memory communication on the DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - data2McuAddress : Pointer to memory where DSP writes / MCU reads
- * - data2DspAddress : Pointer to memory where MCU writes / DSP reads
- * - mainInst : NetEQ main instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_AddressInit(DSPInst_t *inst, const void *data2McuAddress,
- const void *data2DspAddress, const void *mainInst)
-{
-
- /* set shared-memory addresses in the DSP instance */
- inst->pw16_readAddress = (int16_t *) data2DspAddress;
- inst->pw16_writeAddress = (int16_t *) data2McuAddress;
-
- /* set pointer to main NetEQ instance */
- inst->main_inst = (void *) mainInst;
-
- /* set output frame size to 10 ms = 80 samples in narrowband */
- inst->millisecondsPerCall = 10;
- inst->timestampsPerCall = 80;
-
- return (0);
-
-}
-
-/****************************************************************************
- * NETEQDSP_clearInCallStats(...)
- *
- * Reset in-call statistics variables on DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_ClearInCallStats(DSPInst_t *inst)
-{
- /* Reset statistics counters */
- inst->statInst.accelerateLength = 0;
- inst->statInst.expandLength = 0;
- inst->statInst.preemptiveLength = 0;
- inst->statInst.addedSamples = 0;
- return (0);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_ClearPostCallStats(...)
- *
- * Reset post-call statistics variables on DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_ClearPostCallStats(DSPInst_t *inst)
-{
-
- /* Reset statistics counters */
- inst->statInst.expandedVoiceSamples = 0;
- inst->statInst.expandedNoiseSamples = 0;
- return (0);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_ClearActivityStats(...)
- *
- * Reset processing activity statistics.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- */
-
-void WebRtcNetEQ_ClearActivityStats(DSPInst_t *inst) {
- memset(&inst->activity_stats, 0, sizeof(ActivityStats));
-}
-
-#ifdef NETEQ_VAD
-
-/****************************************************************************
- * WebRtcNetEQ_InitVAD(...)
- *
- * Initializes post-decode VAD instance.
- *
- * Input:
- * - VADinst : PostDecodeVAD instance
- * - fs : Initial sample rate
- *
- * Output:
- * - VADinst : Updated instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, uint16_t fs)
-{
-
- int res = 0;
-
- /* initially, disable the post-decode VAD */
- VADInst->VADEnabled = 0;
-
- if (VADInst->VADState != NULL /* if VAD state is provided */
- && VADInst->initFunction != NULL /* and all function ... */
- && VADInst->setmodeFunction != NULL /* ... pointers ... */
- && VADInst->VADFunction != NULL) /* ... are defined */
- {
- res = VADInst->initFunction( VADInst->VADState ); /* call VAD init function */
- res |= WebRtcNetEQ_SetVADModeInternal( VADInst, VADInst->VADMode );
-
- if (res!=0)
- {
- /* something is wrong; play it safe and set the VADstate to NULL */
- VADInst->VADState = NULL;
- }
- else if (fs<=16000)
- {
- /* enable VAD if NB or WB (VAD cannot handle SWB) */
- VADInst->VADEnabled = 1;
- }
- }
-
- /* reset SID/CNG interval counter */
- VADInst->SIDintervalCounter = 0;
-
- /* initialize with active-speaker decision */
- VADInst->VADDecision = 1;
-
- return(res);
-
-}
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADModeInternal(...)
- *
- * Set the VAD mode in the VAD struct, and communicate it to the VAD instance
- * if it exists.
- *
- * Input:
- * - VADinst : PostDecodeVAD instance
- * - mode : Mode number passed on to the VAD function
- *
- * Output:
- * - VADinst : Updated instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADModeInternal(PostDecodeVAD_t *VADInst, int mode)
-{
-
- int res = 0;
-
- VADInst->VADMode = mode;
-
- if (VADInst->VADState != NULL)
- {
- /* call setmode function */
- res = VADInst->setmodeFunction(VADInst->VADState, mode);
- }
-
- return(res);
-
-}
-
-#endif /* NETEQ_VAD */
-
-/****************************************************************************
- * WebRtcNetEQ_FlushSpeechBuffer(...)
- *
- * Flush the speech buffer.
- *
- * Input:
- * - inst : NetEq DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- * : non-zero - error
- */
-
-int WebRtcNetEQ_FlushSpeechBuffer(DSPInst_t *inst)
-{
- int16_t fs_mult;
-
- /* calcualte fs/8000 */
- fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
-
- /* clear buffer */
- WebRtcSpl_MemSetW16(inst->speechBuffer, 0, SPEECH_BUF_SIZE);
- inst->endPosition = 565 * fs_mult;
- inst->curPosition = inst->endPosition - inst->ExpandInst.w16_overlap;
-
- return 0;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.h
deleted file mode 100644
index 9371938d5f6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp.h
+++ /dev/null
@@ -1,807 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains some DSP initialization functions,
- * constant table definitions and other parameters.
- * Also contains definitions of all DSP-side data structures.
- */
-
-
-#ifndef DSP_H
-#define DSP_H
-
-#include "typedefs.h"
-
-#include "webrtc_cng.h"
-
-#include "codec_db_defines.h"
-#include "neteq_defines.h"
-#include "neteq_statistics.h"
-
-#ifdef NETEQ_ATEVENT_DECODE
-#include "dtmf_tonegen.h"
-#endif
-
-
-
-/*****************************/
-/* Pre-processor definitions */
-/*****************************/
-
-/* FSMULT is the sample rate divided by 8000 */
-#if defined(NETEQ_48KHZ_WIDEBAND)
- #define FSMULT 6
-#elif defined(NETEQ_32KHZ_WIDEBAND)
- #define FSMULT 4
-#elif defined(NETEQ_WIDEBAND)
- #define FSMULT 2
-#else
- #define FSMULT 1
-#endif
-
-/* Size of the speech buffer (or synchronization buffer). */
-/* 60 ms decoding + 10 ms syncbuff + 0.625ms lookahead */
-#define SPEECH_BUF_SIZE (565 * FSMULT)
-
-/* Misc definitions */
-#define BGN_LPC_ORDER (4 + FSMULT) /* 5, 6, 8, or 10 */
-#define UNVOICED_LPC_ORDER 6
-#define RANDVEC_NO_OF_SAMPLES 256
-
-/* Number of milliseconds to remove/add during accelerate/pre-emptive expand
- under BGNonly operation */
-#define DEFAULT_TIME_ADJUST 8
-
-/* Number of RecOut calls without CNG/SID before re-enabling post-decode VAD */
-#define POST_DECODE_VAD_AUTO_ENABLE 3000
-
-/* 8kHz windowing in Q15 (over 5 samples) */
-#define NETEQ_OVERLAP_WINMUTE_8KHZ_START 27307
-#define NETEQ_OVERLAP_WINMUTE_8KHZ_INC -5461
-#define NETEQ_OVERLAP_WINUNMUTE_8KHZ_START 5461
-#define NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC 5461
-/* 16kHz windowing in Q15 (over 10 samples) */
-#define NETEQ_OVERLAP_WINMUTE_16KHZ_START 29789
-#define NETEQ_OVERLAP_WINMUTE_16KHZ_INC -2979
-#define NETEQ_OVERLAP_WINUNMUTE_16KHZ_START 2979
-#define NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC 2979
-/* 32kHz windowing in Q15 (over 20 samples) */
-#define NETEQ_OVERLAP_WINMUTE_32KHZ_START 31208
-#define NETEQ_OVERLAP_WINMUTE_32KHZ_INC -1560
-#define NETEQ_OVERLAP_WINUNMUTE_32KHZ_START 1560
-#define NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC 1560
-/* 48kHz windowing in Q15 (over 30 samples) */
-#define NETEQ_OVERLAP_WINMUTE_48KHZ_START 31711
-#define NETEQ_OVERLAP_WINMUTE_48KHZ_INC -1057
-#define NETEQ_OVERLAP_WINUNMUTE_48KHZ_START 1057
-#define NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC 1057
-
-/* Fade BGN towards zero after this many Expand calls */
-#define FADE_BGN_TIME 200
-
-
-/*******************/
-/* Constant tables */
-/*******************/
-
-extern const int16_t WebRtcNetEQ_kDownsample8kHzTbl[];
-extern const int16_t WebRtcNetEQ_kDownsample16kHzTbl[];
-extern const int16_t WebRtcNetEQ_kDownsample32kHzTbl[];
-extern const int16_t WebRtcNetEQ_kDownsample48kHzTbl[];
-extern const int16_t WebRtcNetEQ_kRandnTbl[];
-extern const int16_t WebRtcNetEQ_kMixFractionFuncTbl[];
-extern const int16_t WebRtcNetEQ_k1049div[];
-extern const int16_t WebRtcNetEQ_k2097div[];
-extern const int16_t WebRtcNetEQ_k5243div[];
-
-
-
-/************/
-/* Typedefs */
-/************/
-
-enum BGNMode
-{
- BGN_ON, /* default "normal" behavior with eternal noise */
- BGN_FADE, /* noise fades to zero after some time */
- BGN_OFF /* background noise is always zero */
-};
-
-#ifdef NETEQ_STEREO
-enum MasterSlaveMode
-{
- NETEQ_MONO, /* stand-alone instance */
- NETEQ_MASTER, /* master instance in a spatial/stereo configuration */
- NETEQ_SLAVE /* slave instance in a spatial/stereo configuration */
-};
-
-enum MasterSlaveExtraInfo
-{
- NO_INFO, /* no info to convey */
- ACC_FAIL, /* signal that accelerate failed */
- PE_EXP_FAIL, /* signal that pre-emptive expand failed */
- DTMF_OVERDUB, /* signal that DTMF overdub is generated */
- DTMF_ONLY /* signal that DTMF only is played */
-};
-#endif
-
-/****************************/
-/* DSP-side data structures */
-/****************************/
-
-/* Background noise (BGN) instance for storing BGN parameters
- (sub-instance of NETEQDSP_inst) */
-typedef struct BGNInst_t_
-{
-
- int32_t w32_energy;
- int32_t w32_energyMax;
- int32_t w32_energyUpdate;
- int32_t w32_energyUpdateLow;
- int16_t pw16_filterState[BGN_LPC_ORDER];
- int16_t pw16_filter[BGN_LPC_ORDER + 1];
- int16_t w16_mutefactor;
- int16_t w16_scale;
- int16_t w16_scaleShift;
- int16_t w16_initialized;
- enum BGNMode bgnMode;
-
-} BGNInst_t;
-
-/* Expansion instance (sub-instance of NETEQDSP_inst) */
-typedef struct ExpandInst_t_
-{
-
- int16_t w16_overlap; /* Constant, 5 for NB and 10 for WB */
- int16_t w16_consecExp; /* Number of consecutive expand calls */
- int16_t *pw16_arFilter; /* length [UNVOICED_LPC_ORDER+1] */
- int16_t *pw16_arState; /* length [UNVOICED_LPC_ORDER] */
- int16_t w16_arGain;
- int16_t w16_arGainScale;
- int16_t w16_vFraction; /* Q14 */
- int16_t w16_currentVFraction; /* Q14 */
- int16_t *pw16_expVecs[2];
- int16_t w16_lags[3];
- int16_t w16_maxLag;
- int16_t *pw16_overlapVec; /* last samples of speech history */
- int16_t w16_lagsDirection;
- int16_t w16_lagsPosition;
- int16_t w16_expandMuteFactor; /* Q14 */
- int16_t w16_stopMuting;
- int16_t w16_onset;
- int16_t w16_muteSlope; /* Q20 */
-
-} ExpandInst_t;
-
-#ifdef NETEQ_VAD
-
-/*
- * VAD function pointer types, replicating the typedefs in webrtc_neteq_internal.h.
- * These function pointers match the definitions of WebRtc VAD functions WebRtcVad_Init,
- * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in webrtc_vad.h.
- */
-typedef int (*VADInitFunction)(void *VAD_inst);
-typedef int (*VADSetmodeFunction)(void *VAD_inst, int mode);
-typedef int (*VADFunction)(void *VAD_inst, int fs, int16_t *frame,
- int frameLen);
-
-/* Post-decode VAD instance (sub-instance of NETEQDSP_inst) */
-typedef struct PostDecodeVAD_t_
-{
-
- void *VADState; /* pointer to a VAD instance */
-
- int16_t VADEnabled; /* 1 if enabled, 0 if disabled */
- int VADMode; /* mode parameter to pass to the VAD function */
- int VADDecision; /* 1 for active, 0 for passive */
- int16_t SIDintervalCounter; /* reset when decoding CNG/SID frame,
- increment for each recout call */
-
- /* Function pointers */
- VADInitFunction initFunction; /* VAD init function */
- VADSetmodeFunction setmodeFunction; /* VAD setmode function */
- VADFunction VADFunction; /* VAD function */
-
-} PostDecodeVAD_t;
-
-#endif /* NETEQ_VAD */
-
-#ifdef NETEQ_STEREO
-#define MAX_MS_DECODES 10
-
-typedef struct
-{
- /* Stand-alone, master, or slave */
- enum MasterSlaveMode msMode;
-
- enum MasterSlaveExtraInfo extraInfo;
-
- uint16_t instruction;
- int16_t distLag;
- int16_t corrLag;
- int16_t bestIndex;
-
- uint32_t endTimestamp;
- uint16_t samplesLeftWithOverlap;
-
-} MasterSlaveInfo;
-#endif
-
-
-/* "Main" NetEQ DSP instance */
-typedef struct DSPInst_t_
-{
-
- /* MCU/DSP Communication layer */
- int16_t *pw16_readAddress;
- int16_t *pw16_writeAddress;
- void *main_inst;
-
- /* Output frame size in ms and samples */
- int16_t millisecondsPerCall;
- int16_t timestampsPerCall;
-
- /*
- * Example of speech buffer
- *
- * -----------------------------------------------------------
- * | History T-60 to T | Future |
- * -----------------------------------------------------------
- * ^ ^
- * | |
- * curPosition endPosition
- *
- * History is gradually shifted out to the left when inserting
- * new data at the end.
- */
-
- int16_t speechBuffer[SPEECH_BUF_SIZE]; /* History/future speech buffer */
- int curPosition; /* Next sample to play */
- int endPosition; /* Position that ends future data */
- uint32_t endTimestamp; /* Timestamp value at end of future data */
- uint32_t videoSyncTimestamp; /* (Estimated) timestamp of the last
- played sample (usually same as
- endTimestamp-(endPosition-curPosition)
- except during Expand and CNG) */
- uint16_t fs; /* sample rate in Hz */
- int16_t w16_frameLen; /* decoder frame length in samples */
- int16_t w16_mode; /* operation used during last RecOut call */
- int16_t w16_muteFactor; /* speech mute factor in Q14 */
- int16_t *pw16_speechHistory; /* beginning of speech history during Expand */
- int16_t w16_speechHistoryLen; /* 256 for NB and 512 for WB */
-
- /* random noise seed parameters */
- int16_t w16_seedInc;
- uint32_t uw16_seed;
-
- /* VQmon related variable */
- int16_t w16_concealedTS;
-
- /*****************/
- /* Sub-instances */
- /*****************/
-
- /* Decoder data */
- CodecFuncInst_t codec_ptr_inst;
-
-#ifdef NETEQ_CNG_CODEC
- /* CNG "decoder" instance */
- CNG_dec_inst *CNG_Codec_inst;
-#endif /* NETEQ_CNG_CODEC */
-
-#ifdef NETEQ_ATEVENT_DECODE
- /* DTMF generator instance */
- dtmf_tone_inst_t DTMFInst;
-#endif /* NETEQ_CNG_CODEC */
-
-#ifdef NETEQ_VAD
- /* Post-decode VAD instance */
- PostDecodeVAD_t VADInst;
-#endif /* NETEQ_VAD */
-
- /* Expand instance (defined above) */
- ExpandInst_t ExpandInst;
-
- /* Background noise instance (defined above) */
- BGNInst_t BGNInst;
-
- /* Internal statistics instance */
- DSPStats_t statInst;
-
- /* Internal instance for short-term processing activity. */
- ActivityStats activity_stats;
-
-#ifdef NETEQ_STEREO
- /* Pointer to Master/Slave info */
- MasterSlaveInfo *msInfo;
-#endif
-
-} DSPInst_t;
-
-
-/*************************/
-/* Function declarations */
-/*************************/
-
-/****************************************************************************
- * WebRtcNetEQ_DSPInit(...)
- *
- * Initializes DSP side of NetEQ.
- *
- * Input:
- * - inst : NetEq DSP instance
- * - fs : Initial sample rate (may change when decoding data)
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- * : non-zero - error
- */
-
-int WebRtcNetEQ_DSPInit(DSPInst_t *inst, uint16_t fs);
-
-/****************************************************************************
- * WebRtcNetEQ_AddressInit(...)
- *
- * Initializes the shared-memory communication on the DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - data2McuAddress : Pointer to memory where DSP writes / MCU reads
- * - data2DspAddress : Pointer to memory where MCU writes / DSP reads
- * - mainInst : NetEQ main instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_AddressInit(DSPInst_t *inst, const void *data2McuAddress,
- const void *data2DspAddress, const void *mainInst);
-
-/****************************************************************************
- * WebRtcNetEQ_ClearInCallStats(...)
- *
- * Reset in-call statistics variables on DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_ClearInCallStats(DSPInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_ClearPostCallStats(...)
- *
- * Reset post-call statistics variables on DSP side.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- */
-
-int WebRtcNetEQ_ClearPostCallStats(DSPInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_ClearActivityStats(...)
- *
- * Reset processing activity statistics.
- *
- * Input:
- * - inst : NetEQ DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- */
-
-void WebRtcNetEQ_ClearActivityStats(DSPInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_RecOutInternal(...)
- *
- * This function asks NetEQ for more speech/audio data.
- *
- * Input:
- * - inst : NetEQ instance, i.e. the user that requests more
- * speech/audio data.
- * - outdata : Pointer to a memory space where the output data
- * should be stored.
- * - BGNonly : If non-zero, RecOut will only produce background
- * noise. It will still draw packets from the packet
- * buffer, but they will never be decoded.
- * - av_sync : 1 if NetEQ is in AV-sync, 0 otherwise.
- *
- * Output:
- * - inst : Updated user information
- * - len : Number of samples that were outputted from NetEq
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, int16_t *pw16_outData,
- int16_t *pw16_len, int16_t BGNonly, int av_sync);
-
-/****************************************************************************
- * WebRtcNetEQ_Normal(...)
- *
- * This function has the possibility to modify data that is played out in Normal
- * mode, for example adjust the gain of the signal. The length of the signal
- * can not be changed.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector
- * - decoded : Pointer to vector of new data from decoder
- * - len : Number of input samples
- *
- * Output:
- * - inst : Updated user information
- * - pw16_len : Pointer to varibale where the number of samples
- * produced will be written
- *
- * Return value : >=0 - Number of samples written to outData
- * -1 - Error
- */
-
-int WebRtcNetEQ_Normal(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_decoded, int16_t len,
- int16_t *pw16_outData, int16_t *pw16_len);
-
-/****************************************************************************
- * WebRtcNetEQ_Expand(...)
- *
- * This function produces one "chunk" of expansion data (PLC audio). The
- * lenght of the produced audio depends on the speech history.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector
- * - BGNonly : If non-zero, Expand will only produce background
- * noise.
- * - pw16_len : Desired number of samples (only for BGN mode).
- *
- * Output:
- * - inst : Updated user information
- * - outdata : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Number of samples that were outputted from NetEq
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_Expand(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly);
-
-/****************************************************************************
- * WebRtcNetEQ_GenerateBGN(...)
- *
- * This function generates and writes len samples of background noise to the
- * output vector. The Expand function will be called repeteadly until the
- * correct number of samples is produced.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector
- * - len : Desired length of produced BGN.
- *
- *
- * Output:
- * - pw16_outData : Pointer to a memory space where the output data
- * should be stored
- *
- * Return value : >=0 - Number of noise samples produced and written
- * to output
- * -1 - Error
- */
-
-int WebRtcNetEQ_GenerateBGN(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_outData, int16_t len);
-
-/****************************************************************************
- * WebRtcNetEQ_PreEmptiveExpand(...)
- *
- * This function tries to extend the audio data by repeating one or several
- * pitch periods. The operation is only carried out if the correlation is
- * strong or if the signal energy is very low. The algorithm is the
- * reciprocal of the Accelerate algorithm.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to newly decoded speech.
- * - len : Length of decoded speech.
- * - oldDataLen : Length of the part of decoded that has already been played out.
- * - BGNonly : If non-zero, Pre-emptive Expand will only copy
- * the first DEFAULT_TIME_ADJUST seconds of the
- * input and append to the end. No signal matching is
- * done.
- *
- * Output:
- * - inst : Updated instance
- * - outData : Pointer to a memory space where the output data
- * should be stored. The vector must be at least
- * min(len + 120*fs/8000, NETEQ_MAX_OUTPUT_SIZE)
- * elements long.
- * - pw16_len : Number of samples written to outData.
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- const int16_t *pw16_decoded, int len, int oldDataLen,
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly);
-
-/****************************************************************************
- * WebRtcNetEQ_Accelerate(...)
- *
- * This function tries to shorten the audio data by removing one or several
- * pitch periods. The operation is only carried out if the correlation is
- * strong or if the signal energy is very low.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to newly decoded speech.
- * - len : Length of decoded speech.
- * - BGNonly : If non-zero, Accelerate will only remove the last
- * DEFAULT_TIME_ADJUST seconds of the intput.
- * No signal matching is done.
- *
- *
- * Output:
- * - inst : Updated instance
- * - outData : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Number of samples written to outData.
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- const int16_t *pw16_decoded, int len,
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly);
-
-/****************************************************************************
- * WebRtcNetEQ_Merge(...)
- *
- * This function is used to merge new data from the decoder to the exisiting
- * stream in the synchronization buffer. The merge operation is typically
- * done after a packet loss, where the end of the expanded data does not
- * fit naturally with the new decoded data.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to new decoded speech.
- * - len : Number of samples in pw16_decoded.
- *
- *
- * Output:
- * - inst : Updated user information
- * - outData : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Number of samples written to pw16_outData
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_Merge(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_decoded, int len, int16_t *pw16_outData,
- int16_t *pw16_len);
-
-/****************************************************************************
- * WebRtcNetEQ_Cng(...)
- *
- * This function produces CNG according to RFC 3389
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - len : Number of samples to produce
- *
- * Output:
- * - pw16_outData : Output CNG
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-#ifdef NETEQ_CNG_CODEC
-/* Must compile NetEQ with CNG support to enable this function */
-
-int WebRtcNetEQ_Cng(DSPInst_t *inst, int16_t *pw16_outData, int len);
-
-#endif /* NETEQ_CNG_CODEC */
-
-/****************************************************************************
- * WebRtcNetEQ_BGNUpdate(...)
- *
- * This function updates the background noise parameter estimates.
- *
- * Input:
- * - inst : NetEQ instance, where the speech history is stored.
- * - scratchPtr : Pointer to scratch vector.
- *
- * Output:
- * - inst : Updated information about the BGN characteristics.
- *
- * Return value : No return value
- */
-
-void WebRtcNetEQ_BGNUpdate(
-#ifdef SCRATCH
- DSPInst_t *inst, int16_t *pw16_scratchPtr
-#else
- DSPInst_t *inst
-#endif
- );
-
-#ifdef NETEQ_VAD
-/* Functions used by post-decode VAD */
-
-/****************************************************************************
- * WebRtcNetEQ_InitVAD(...)
- *
- * Initializes post-decode VAD instance.
- *
- * Input:
- * - VADinst : PostDecodeVAD instance
- * - fs : Initial sample rate
- *
- * Output:
- * - VADinst : Updated instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, uint16_t fs);
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADModeInternal(...)
- *
- * Set the VAD mode in the VAD struct, and communicate it to the VAD instance
- * if it exists.
- *
- * Input:
- * - VADinst : PostDecodeVAD instance
- * - mode : Mode number passed on to the VAD function
- *
- * Output:
- * - VADinst : Updated instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADModeInternal(PostDecodeVAD_t *VADInst, int mode);
-
-#endif /* NETEQ_VAD */
-
-/****************************************************************************
- * WebRtcNetEQ_FlushSpeechBuffer(...)
- *
- * Flush the speech buffer.
- *
- * Input:
- * - inst : NetEq DSP instance
- *
- * Output:
- * - inst : Updated instance
- *
- * Return value : 0 - ok
- * : non-zero - error
- */
-
-int WebRtcNetEQ_FlushSpeechBuffer(DSPInst_t *inst);
-
-#ifndef WEBRTC_NETEQ_40BITACC_TEST
-
-#include "signal_processing_library.h"
-/* Map to regular SPL functions */
-#define WebRtcNetEQ_CrossCorr WebRtcSpl_CrossCorrelation
-#define WebRtcNetEQ_DotW16W16 WebRtcSpl_DotProductWithScale
-
-#else /* WEBRTC_NETEQ_40BITACC_TEST defined */
-/* Run NetEQ with simulated 40-bit accumulator to run bit-exact to a DSP
- implementation where the main (splib and NetEQ) functions have been
- 40-bit optimized. */
-
-/* Map to special 40-bit optimized functions, defined below */
-#define WebRtcNetEQ_CrossCorr WebRtcNetEQ_40BitAccCrossCorr
-#define WebRtcNetEQ_DotW16W16 WebRtcNetEQ_40BitAccDotW16W16
-
-/****************************************************************************
- * WebRtcNetEQ_40BitAccCrossCorr(...)
- *
- * Calculates the Cross correlation between two sequences seq1 and seq2. Seq1
- * is fixed and seq2 slides as the pointer is increased with step
- *
- * Input:
- * - seq1 : First sequence (fixed throughout the correlation)
- * - seq2 : Second sequence (slided step_seq2 for each
- * new correlation)
- * - dimSeq : Number of samples to use in the cross correlation.
- * Should be no larger than 1024 to avoid overflow.
- * - dimCrossCorr : Number of CrossCorrelations to calculate (start
- * position for seq2 is updated for each new one)
- * - rShift : Number of right shifts to use
- * - step_seq2 : How many (positive or negative) steps the seq2
- * pointer should be updated for each new cross
- * correlation value
- *
- * Output:
- * - crossCorr : The cross correlation in Q-rShift
- */
-
-void WebRtcNetEQ_40BitAccCrossCorr(int32_t *crossCorr, int16_t *seq1,
- int16_t *seq2, int16_t dimSeq,
- int16_t dimCrossCorr, int16_t rShift,
- int16_t step_seq2);
-
-/****************************************************************************
- * WebRtcNetEQ_40BitAccDotW16W16(...)
- *
- * Calculates the dot product between two vectors (int16_t)
- *
- * Input:
- * - vector1 : Vector 1
- * - vector2 : Vector 2
- * - len : Number of samples in vector
- * Should be no larger than 1024 to avoid overflow.
- * - scaling : The number of right shifts (after multiplication)
- * required to avoid overflow in the dot product.
- * Return value : The dot product
- */
-
-int32_t WebRtcNetEQ_40BitAccDotW16W16(int16_t *vector1, int16_t *vector2,
- int len, int scaling);
-
-#endif /* WEBRTC_NETEQ_40BITACC_TEST */
-
-#endif /* DSP_H */
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc
index e1aa0e53de7..7451ae26f8e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
#include <assert.h>
#include <string.h> // Access to memset.
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h
index 60cd995d840..af4f4d6c88c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper.h
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DSP_HELPER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DSP_HELPER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DSP_HELPER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DSP_HELPER_H_
#include <string.h> // Access to size_t.
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -133,4 +133,4 @@ class DspHelper {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DSP_HELPER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DSP_HELPER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper_unittest.cc
index 852c2ec927c..cbceff61945 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dsp_helper_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helper_unittest.cc
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c
deleted file mode 100644
index ef721d55997..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains some help functions that did not fit elsewhere.
- */
-
-#include "dsp_helpfunctions.h"
-
-
-int16_t WebRtcNetEQ_CalcFsMult(uint16_t fsHz)
-{
- switch (fsHz)
- {
- case 8000:
- {
- return 1;
- }
- case 16000:
- {
- return 2;
- }
- case 32000:
- {
- return 4;
- }
- case 48000:
- {
- return 6;
- }
- default:
- {
- return 1;
- }
- }
-}
-
-
-int WebRtcNetEQ_DownSampleTo4kHz(const int16_t *in, int inLen, uint16_t inFsHz,
- int16_t *out, int outLen, int compensateDelay)
-{
- int16_t *B; /* filter coefficients */
- int16_t Blen; /* number of coefficients */
- int16_t filterDelay; /* phase delay in samples */
- int16_t factor; /* conversion rate (inFsHz/8000) */
- int ok;
-
- /* Set constants depending on frequency used */
- /* NOTE: The phase delay values are wrong compared to the true phase delay
- of the filters. However, the error is preserved (through the +1 term)
- for consistency. */
- switch (inFsHz)
- {
- case 8000:
- {
- Blen = 3;
- factor = 2;
- B = (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl;
- filterDelay = 1 + 1;
- break;
- }
-#ifdef NETEQ_WIDEBAND
- case 16000:
- {
- Blen = 5;
- factor = 4;
- B = (int16_t*) WebRtcNetEQ_kDownsample16kHzTbl;
- filterDelay = 2 + 1;
- break;
- }
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- case 32000:
- {
- Blen = 7;
- factor = 8;
- B = (int16_t*) WebRtcNetEQ_kDownsample32kHzTbl;
- filterDelay = 3 + 1;
- break;
- }
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- case 48000:
- {
- Blen = 7;
- factor = 12;
- B = (int16_t*) WebRtcNetEQ_kDownsample48kHzTbl;
- filterDelay = 3 + 1;
- break;
- }
-#endif
- default:
- {
- /* unsupported or wrong sample rate */
- return -1;
- }
- }
-
- if (!compensateDelay)
- {
- /* disregard delay compensation */
- filterDelay = 0;
- }
-
- ok = WebRtcSpl_DownsampleFast((int16_t*) &in[Blen - 1],
- (int16_t) (inLen - (Blen - 1)), /* number of input samples */
- out, (int16_t) outLen, /* number of output samples to produce */
- B, Blen, factor, filterDelay); /* filter parameters */
-
- return ok; /* return value is -1 if input signal is too short */
-
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h
deleted file mode 100644
index 11119f1b825..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h
+++ /dev/null
@@ -1,220 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Various help functions used by the DSP functions.
- */
-
-#ifndef DSP_HELPFUNCTIONS_H
-#define DSP_HELPFUNCTIONS_H
-
-#include "typedefs.h"
-
-#include "dsp.h"
-
-/****************************************************************************
- * WebRtcNetEQ_Correlator(...)
- *
- * Calculate signal correlation.
- *
- * Input:
- * - inst : DSP instance
- * - data : Speech history to do expand from (older history in data[-4..-1])
- * - dataLen : Length of data
- *
- * Output:
- * - corrOut : CC of downsampled signal
- * - corrScale : Scale factor for correlation (-Qdomain)
- *
- * Return value : Length of correlated data
- */
-
-int16_t WebRtcNetEQ_Correlator(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_data, int16_t w16_dataLen,
- int16_t *pw16_corrOut,
- int16_t *pw16_corrScale);
-
-/****************************************************************************
- * WebRtcNetEQ_PeakDetection(...)
- *
- * Peak detection with parabolic fit.
- *
- * Input:
- * - data : Data sequence for peak detection
- * - dataLen : Length of data
- * - nmbPeaks : Number of peaks to detect
- * - fs_mult : Sample rate multiplier
- *
- * Output:
- * - corrIndex : Index of the peak
- * - winner : Value of the peak
- *
- * Return value : 0 for ok
- */
-
-int16_t WebRtcNetEQ_PeakDetection(int16_t *pw16_data, int16_t w16_dataLen,
- int16_t w16_nmbPeaks, int16_t fs_mult,
- int16_t *pw16_corrIndex,
- int16_t *pw16_winners);
-
-/****************************************************************************
- * WebRtcNetEQ_PrblFit(...)
- *
- * Three-point parbola fit.
- *
- * Input:
- * - 3pts : Three input samples
- * - fs_mult : Sample rate multiplier
- *
- * Output:
- * - Ind : Index of the peak
- * - outVal : Value of the peak
- *
- * Return value : 0 for ok
- */
-
-int16_t WebRtcNetEQ_PrblFit(int16_t *pw16_3pts, int16_t *pw16_Ind,
- int16_t *pw16_outVal, int16_t fs_mult);
-
-/****************************************************************************
- * WebRtcNetEQ_MinDistortion(...)
- *
- * Find the lag that results in minimum distortion.
- *
- * Input:
- * - data : Start of speech to perform distortion on, second vector is assumed
- * to be data[-Lag]
- * - minLag : Start lag
- * - maxLag : End lag
- * - len : Length to correlate
- *
- * Output:
- * - dist : Distorion value
- *
- * Return value : Lag for minimum distortion
- */
-
-int16_t WebRtcNetEQ_MinDistortion(const int16_t *pw16_data,
- int16_t w16_minLag, int16_t w16_maxLag,
- int16_t len, int32_t *pw16_dist);
-
-/****************************************************************************
- * WebRtcNetEQ_RandomVec(...)
- *
- * Generate random vector.
- *
- * Input:
- * - seed : Current seed (input/output)
- * - len : Number of samples to generate
- * - incVal : Jump step
- *
- * Output:
- * - randVec : Generated random vector
- */
-
-void WebRtcNetEQ_RandomVec(uint32_t *w32_seed, int16_t *pw16_randVec,
- int16_t w16_len, int16_t w16_incval);
-
-/****************************************************************************
- * WebRtcNetEQ_MixVoiceUnvoice(...)
- *
- * Mix voiced and unvoiced signal.
- *
- * Input:
- * - voicedVec : Voiced input signal
- * - unvoicedVec : Unvoiced input signal
- * - current_vfraction : Current mixing factor
- * - vfraction_change : Mixing factor change per sample
- * - N : Number of samples
- *
- * Output:
- * - outData : Mixed signal
- */
-
-void WebRtcNetEQ_MixVoiceUnvoice(int16_t *pw16_outData, int16_t *pw16_voicedVec,
- int16_t *pw16_unvoicedVec,
- int16_t *w16_current_vfraction,
- int16_t w16_vfraction_change, int16_t N);
-
-/****************************************************************************
- * WebRtcNetEQ_UnmuteSignal(...)
- *
- * Gradually reduce attenuation.
- *
- * Input:
- * - inVec : Input signal
- * - startMuteFact : Starting attenuation
- * - unmuteFact : Factor to "unmute" with (Q20)
- * - N : Number of samples
- *
- * Output:
- * - outVec : Output signal
- */
-
-void WebRtcNetEQ_UnmuteSignal(int16_t *pw16_inVec, int16_t *startMuteFact,
- int16_t *pw16_outVec, int16_t unmuteFact,
- int16_t N);
-
-/****************************************************************************
- * WebRtcNetEQ_MuteSignal(...)
- *
- * Gradually increase attenuation.
- *
- * Input:
- * - inout : Input/output signal
- * - muteSlope : Slope of muting
- * - N : Number of samples
- */
-
-void WebRtcNetEQ_MuteSignal(int16_t *pw16_inout, int16_t muteSlope,
- int16_t N);
-
-/****************************************************************************
- * WebRtcNetEQ_CalcFsMult(...)
- *
- * Calculate the sample rate divided by 8000.
- *
- * Input:
- * - fsHz : Sample rate in Hz in {8000, 16000, 32000, 48000}.
- *
- * Return value : fsHz/8000 for the valid values, 1 for other inputs
- */
-
-int16_t WebRtcNetEQ_CalcFsMult(uint16_t fsHz);
-
-/****************************************************************************
- * WebRtcNetEQ_DownSampleTo4kHz(...)
- *
- * Lowpass filter and downsample a signal to 4 kHz sample rate.
- *
- * Input:
- * - in : Input signal samples.
- * - inLen : Number of input samples.
- * - inFsHz : Input sample rate in Hz.
- * - outLen : Desired number of samples in decimated signal.
- * - compensateDelay : If non-zero, compensate for the phase delay of
- * of the anti-alias filter.
- *
- * Output:
- * - out : Output signal samples.
- *
- * Return value : 0 - Ok
- * -1 - Error
- *
- */
-
-int WebRtcNetEQ_DownSampleTo4kHz(const int16_t *in, int inLen, uint16_t inFsHz,
- int16_t *out, int outLen, int compensateDelay);
-
-#endif
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.c
deleted file mode 100644
index 1788635c7df..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.c
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of packet buffer for DTMF messages.
- */
-
-#include "dtmf_buffer.h"
-
-#include "typedefs.h" /* to define endianness */
-#include "signal_processing_library.h"
-
-#include "neteq_error_codes.h"
-
-
-#ifdef NETEQ_ATEVENT_DECODE
-
-int16_t WebRtcNetEQ_DtmfRemoveEvent(dtmf_inst_t *DTMFdec_inst)
-{
-
- int i;
- for (i = 0; i < 3; i++)
- {
- DTMFdec_inst->EventQueue[i] = DTMFdec_inst->EventQueue[i + 1];
- DTMFdec_inst->EventQueueVolume[i] = DTMFdec_inst->EventQueueVolume[i + 1];
- DTMFdec_inst->EventQueueEnded[i] = DTMFdec_inst->EventQueueEnded[i + 1];
- DTMFdec_inst->EventQueueStartTime[i] = DTMFdec_inst->EventQueueStartTime[i + 1];
- DTMFdec_inst->EventQueueEndTime[i] = DTMFdec_inst->EventQueueEndTime[i + 1];
- }
- DTMFdec_inst->EventBufferSize--;
- DTMFdec_inst->EventQueue[3] = -1;
- DTMFdec_inst->EventQueueVolume[3] = 0;
- DTMFdec_inst->EventQueueEnded[3] = 0;
- DTMFdec_inst->EventQueueStartTime[3] = 0;
- DTMFdec_inst->EventQueueEndTime[3] = 0;
-
- return 0;
-}
-
-int16_t WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, uint16_t fs,
- int16_t MaxPLCtime)
-{
- int i;
- if (((fs != 8000) && (fs != 16000) && (fs != 32000) && (fs != 48000)) || (MaxPLCtime < 0))
- {
- return DTMF_DEC_PARAMETER_ERROR;
- }
- if (fs == 8000)
- DTMFdec_inst->framelen = 80;
- else if (fs == 16000)
- DTMFdec_inst->framelen = 160;
- else if (fs == 32000)
- DTMFdec_inst->framelen = 320;
- else
- /* fs == 48000 */
- DTMFdec_inst->framelen = 480;
-
- DTMFdec_inst->MaxPLCtime = MaxPLCtime;
- DTMFdec_inst->CurrentPLCtime = 0;
- DTMFdec_inst->EventBufferSize = 0;
- for (i = 0; i < 4; i++)
- {
- DTMFdec_inst->EventQueue[i] = -1;
- DTMFdec_inst->EventQueueVolume[i] = 0;
- DTMFdec_inst->EventQueueEnded[i] = 0;
- DTMFdec_inst->EventQueueStartTime[i] = 0;
- DTMFdec_inst->EventQueueEndTime[i] = 0;
- }
- return 0;
-}
-
-int16_t WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
- const int16_t *encoded, int16_t len,
- uint32_t timeStamp)
-{
-
- int i;
- int16_t value;
- const int16_t *EventStart;
- int16_t endEvent;
- int16_t Volume;
- int16_t Duration;
- int16_t position = -1;
-
- /* Extract event */
- if (len == 4)
- {
- EventStart = encoded;
-#ifdef WEBRTC_ARCH_BIG_ENDIAN
- value=((*EventStart)>>8);
- endEvent=((*EventStart)&0x80)>>7;
- Volume=((*EventStart)&0x3F);
- Duration=EventStart[1];
-#else
- value = ((*EventStart) & 0xFF);
- endEvent = ((*EventStart) & 0x8000) >> 15;
- Volume = ((*EventStart) & 0x3F00) >> 8;
- Duration = (((((uint16_t) EventStart[1]) >> 8) & 0xFF)
- | (((uint16_t) (EventStart[1] & 0xFF)) << 8));
-#endif
- /* Only events between 0-15 are supported (DTMF tones) */
- if ((value < 0) || (value > 15))
- {
- return 0;
- }
-
- /* Discard all DTMF tones with really low volume (<-36dbm0) */
- if (Volume > 36)
- {
- return 0;
- }
-
- /*Are there any unended events of the same type? */
- for (i = 0; i < DTMFdec_inst->EventBufferSize; i++)
- {
- /* Going through the whole queue even when we have found a match will
- ensure that we add to the latest applicable event */
- if ((DTMFdec_inst->EventQueue[i] == value) && (!DTMFdec_inst->EventQueueEnded[i]
- || endEvent)) position = i;
- }
- if (position > -1)
- {
- DTMFdec_inst->EventQueueVolume[position] = Volume;
- if ((timeStamp + Duration) > DTMFdec_inst->EventQueueEndTime[position]) DTMFdec_inst->EventQueueEndTime[position]
- = DTMFdec_inst->EventQueueStartTime[position] + Duration;
- if (endEvent) DTMFdec_inst->EventQueueEnded[position] = 1;
- }
- else
- {
- if (DTMFdec_inst->EventBufferSize == MAX_DTMF_QUEUE_SIZE)
- { /* Buffer full */
- /* Remove one event */
- DTMFdec_inst->EventBufferSize--;
- }
- /* Store data in the instance on a new position*/
- DTMFdec_inst->EventQueue[DTMFdec_inst->EventBufferSize] = value;
- DTMFdec_inst->EventQueueVolume[DTMFdec_inst->EventBufferSize] = Volume;
- DTMFdec_inst->EventQueueEnded[DTMFdec_inst->EventBufferSize] = endEvent;
- DTMFdec_inst->EventQueueStartTime[DTMFdec_inst->EventBufferSize] = timeStamp;
- DTMFdec_inst->EventQueueEndTime[DTMFdec_inst->EventBufferSize] = timeStamp
- + Duration;
- DTMFdec_inst->EventBufferSize++;
- }
- return 0;
- }
- return DTMF_INSERT_ERROR;
-}
-
-int16_t WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, int16_t *event,
- int16_t *volume, uint32_t currTimeStamp)
-{
-
- if (DTMFdec_inst->EventBufferSize < 1) return 0; /* No events to play */
-
- /* We have events, is it time to play them? */
- if (currTimeStamp < DTMFdec_inst->EventQueueStartTime[0])
- {
- /*No, just return zero */
- return 0;
- }
-
- /* Continue on the event that is currently ongoing */
- *event = DTMFdec_inst->EventQueue[0];
- *volume = DTMFdec_inst->EventQueueVolume[0];
-
- if (DTMFdec_inst->EventQueueEndTime[0] >= (currTimeStamp + DTMFdec_inst->framelen))
- {
-
- /* Still at least framLen to play */
-
- DTMFdec_inst->CurrentPLCtime = 0;
- if ((DTMFdec_inst->EventQueueEndTime[0] == (currTimeStamp + DTMFdec_inst->framelen))
- && (DTMFdec_inst->EventQueueEnded[0]))
- { /* We are done */
- /*Remove the event from Queue*/
- WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
- }
- return DTMFdec_inst->framelen;
-
- }
- else
- {
- if ((DTMFdec_inst->EventQueueEnded[0]) || (DTMFdec_inst->EventQueue[1] > -1))
- {
- /*
- * Less than frameLen to play and end of event or already received next event.
- * Give our a whole frame size of audio to simplify things.
- */
-
- /*Remove the event from Queue*/
- WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
- DTMFdec_inst->CurrentPLCtime = 0;
-
- return DTMFdec_inst->framelen;
-
- }
- else
- {
- /* Less than frameLen to play and not end of event. */
- DTMFdec_inst->CurrentPLCtime = (int16_t) (currTimeStamp
- - DTMFdec_inst->EventQueueEndTime[0]);
-
- if ((DTMFdec_inst->CurrentPLCtime > DTMFdec_inst->MaxPLCtime)
- || (DTMFdec_inst->CurrentPLCtime < -DTMFdec_inst->MaxPLCtime))
- {
- /*Remove the event from queue*/
- WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
- DTMFdec_inst->CurrentPLCtime = 0;
- }
-
- /* If we have a new event that it's time to play */
- if ((DTMFdec_inst->EventQueue[1] > -1) && (DTMFdec_inst->EventQueueStartTime[1]
- >= (currTimeStamp + DTMFdec_inst->framelen)))
- {
- /*Remove the event from queue*/
- WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
- DTMFdec_inst->CurrentPLCtime = 0;
- }
-
- return DTMFdec_inst->framelen;
- }
- }
-}
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.cc
index 1c81ad940c4..91debee14e8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
#include <assert.h>
#include <algorithm> // max
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.h
index 99c9e6a499b..5dd31c2d2e7 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,94 +8,109 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * Packet buffer for DTMF messages.
- */
-
-#ifndef DTMF_BUFFER_H
-#define DTMF_BUFFER_H
-
-#include "typedefs.h"
-
-#include "neteq_defines.h"
-
-/* Include this code only if ATEVENT (DTMF) is defined in */
-#ifdef NETEQ_ATEVENT_DECODE
-
-#define MAX_DTMF_QUEUE_SIZE 4
-
-typedef struct dtmf_inst_t_
-{
- int16_t MaxPLCtime;
- int16_t CurrentPLCtime;
- int16_t EventQueue[MAX_DTMF_QUEUE_SIZE];
- int16_t EventQueueVolume[MAX_DTMF_QUEUE_SIZE];
- int16_t EventQueueEnded[MAX_DTMF_QUEUE_SIZE];
- uint32_t EventQueueStartTime[MAX_DTMF_QUEUE_SIZE];
- uint32_t EventQueueEndTime[MAX_DTMF_QUEUE_SIZE];
- int16_t EventBufferSize;
- int16_t framelen;
-} dtmf_inst_t;
-
-/****************************************************************************
- * WebRtcNetEQ_DtmfDecoderInit(...)
- *
- * This function initializes a DTMF instance.
- *
- * Input:
- * - DTMF_decinst_t : DTMF instance
- * - fs : The sample rate used for the DTMF
- * - MaxPLCtime : Maximum length for a PLC before zeros should be inserted
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int16_t WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, uint16_t fs,
- int16_t MaxPLCtime);
-
-/****************************************************************************
- * WebRtcNetEQ_DtmfInsertEvent(...)
- *
- * This function decodes a packet with DTMF frames.
- *
- * Input:
- * - DTMFdec_inst : DTMF instance
- * - encoded : Encoded DTMF frame(s)
- * - len : Bytes in encoded vector
- *
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int16_t WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
- const int16_t *encoded, int16_t len,
- uint32_t timeStamp);
-
-/****************************************************************************
- * WebRtcNetEQ_DtmfDecode(...)
- *
- * This function decodes a packet with DTMF frame(s). Output will be the
- * event that should be played for next 10 ms.
- *
- * Input:
- * - DTMFdec_inst : DTMF instance
- * - currTimeStamp : The current playout timestamp
- *
- * Output:
- * - event : Event number to be played
- * - volume : Event volume to be played
- *
- * Return value : >0 - There is a event to be played
- * 0 - No event to be played
- * -1 - Error
- */
-
-int16_t WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, int16_t *event,
- int16_t *volume, uint32_t currTimeStamp);
-
-#endif /* NETEQ_ATEVENT_DECODE */
-
-#endif /* DTMF_BUFFER_H */
-
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_BUFFER_H_
+
+#include <list>
+#include <string> // size_t
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+struct DtmfEvent {
+ uint32_t timestamp;
+ int event_no;
+ int volume;
+ int duration;
+ bool end_bit;
+
+ // Constructors
+ DtmfEvent()
+ : timestamp(0),
+ event_no(0),
+ volume(0),
+ duration(0),
+ end_bit(false) {
+ }
+ DtmfEvent(uint32_t ts, int ev, int vol, int dur, bool end)
+ : timestamp(ts),
+ event_no(ev),
+ volume(vol),
+ duration(dur),
+ end_bit(end) {
+ }
+};
+
+// This is the buffer holding DTMF events while waiting for them to be played.
+class DtmfBuffer {
+ public:
+ enum BufferReturnCodes {
+ kOK = 0,
+ kInvalidPointer,
+ kPayloadTooShort,
+ kInvalidEventParameters,
+ kInvalidSampleRate
+ };
+
+ // Set up the buffer for use at sample rate |fs_hz|.
+ explicit DtmfBuffer(int fs_hz) {
+ SetSampleRate(fs_hz);
+ }
+
+ virtual ~DtmfBuffer() {}
+
+ // Flushes the buffer.
+ virtual void Flush() { buffer_.clear(); }
+
+ // Static method to parse 4 bytes from |payload| as a DTMF event (RFC 4733)
+ // and write the parsed information into the struct |event|. Input variable
+ // |rtp_timestamp| is simply copied into the struct.
+ static int ParseEvent(uint32_t rtp_timestamp,
+ const uint8_t* payload,
+ int payload_length_bytes,
+ DtmfEvent* event);
+
+ // Inserts |event| into the buffer. The method looks for a matching event and
+ // merges the two if a match is found.
+ virtual int InsertEvent(const DtmfEvent& event);
+
+ // Checks if a DTMF event should be played at time |current_timestamp|. If so,
+ // the method returns true; otherwise false. The parameters of the event to
+ // play will be written to |event|.
+ virtual bool GetEvent(uint32_t current_timestamp, DtmfEvent* event);
+
+ // Number of events in the buffer.
+ virtual size_t Length() const { return buffer_.size(); }
+
+ virtual bool Empty() const { return buffer_.empty(); }
+
+ // Set a new sample rate.
+ virtual int SetSampleRate(int fs_hz);
+
+ private:
+ typedef std::list<DtmfEvent> DtmfList;
+
+ int max_extrapolation_samples_;
+ int frame_len_samples_; // TODO(hlundin): Remove this later.
+
+ // Compares two events and returns true if they are the same.
+ static bool SameEvent(const DtmfEvent& a, const DtmfEvent& b);
+
+ // Merges |event| to the event pointed out by |it|. The method checks that
+ // the two events are the same (using the SameEvent method), and merges them
+ // if that was the case, returning true. If the events are not the same, false
+ // is returned.
+ bool MergeEvents(DtmfList::iterator it, const DtmfEvent& event);
+
+ // Method used by the sort algorithm to rank events in the buffer.
+ static bool CompareEvents(const DtmfEvent& a, const DtmfEvent& b);
+
+ DtmfList buffer_;
+
+ DISALLOW_COPY_AND_ASSIGN(DtmfBuffer);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer_unittest.cc
index 0b5ed65b8e0..83f981386b7 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_buffer_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
#ifdef WIN32
#include <winsock2.h> // ntohl()
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.cc
index c85534e9b7f..34c615d70f6 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.cc
@@ -28,7 +28,7 @@
// 852 Hz 7 8 9 14
// 941 Hz 10 0 11 15
-#include "webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
#include <assert.h>
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h
index e93f0b883f5..fc1e5e4ad3d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_TONE_GENERATOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_TONE_GENERATOR_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_TONE_GENERATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_TONE_GENERATOR_H_
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -53,4 +53,4 @@ class DtmfToneGenerator {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_TONE_GENERATOR_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_DTMF_TONE_GENERATOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator_unittest.cc
index 37e8bbda96c..94f79dc3456 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_tone_generator_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tone_generator_unittest.cc
@@ -10,12 +10,12 @@
// Unit tests for DtmfToneGenerator class.
-#include "webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
#include <math.h>
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c
deleted file mode 100644
index 8ea413c76f1..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c
+++ /dev/null
@@ -1,367 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the DTMF tone generator and its parameters.
- *
- * A sinusoid is generated using the recursive oscillator model
- *
- * y[n] = sin(w*n + phi) = 2*cos(w) * y[n-1] - y[n-2]
- * = a * y[n-1] - y[n-2]
- *
- * initialized with
- * y[-2] = 0
- * y[-1] = sin(w)
- *
- * A DTMF signal is a combination of two sinusoids, depending
- * on which event is sent (i.e, which key is pressed). The following
- * table maps each key (event codes in parentheses) into two tones:
- *
- * 1209 Hz 1336 Hz 1477 Hz 1633 Hz
- * 697 Hz 1 (ev. 1) 2 (ev. 2) 3 (ev. 3) A (ev. 12)
- * 770 Hz 4 (ev. 4) 5 (ev. 5) 6 (ev. 6) B (ev. 13)
- * 852 Hz 7 (ev. 7) 8 (ev. 8) 9 (ev. 9) C (ev. 14)
- * 941 Hz * (ev. 10) 0 (ev. 0) # (ev. 11) D (ev. 15)
- *
- * The two tones are added to form the DTMF signal.
- *
- */
-
-#include "dtmf_tonegen.h"
-
-#include "signal_processing_library.h"
-
-#include "neteq_error_codes.h"
-
-#ifdef NETEQ_ATEVENT_DECODE
-/* Must compile NetEQ with DTMF support to enable the functionality */
-
-/*******************/
-/* Constant tables */
-/*******************/
-
-/*
- * All tables corresponding to the oscillator model are organized so that
- * the coefficients for a specific frequency is found in the same position
- * in every table. The positions for the tones follow this layout:
- *
- * dummyVector[8] =
- * {
- * 697 Hz, 770 Hz, 852 Hz, 941 Hz,
- * 1209 Hz, 1336 Hz, 1477 Hz, 1633 Hz
- * };
- */
-
-/*
- * Tables for the constant a = 2*cos(w) = 2*cos(2*pi*f/fs)
- * in the oscillator model, for 8, 16, 32 and 48 kHz sample rate.
- * Table values in Q14.
- */
-
-const int16_t WebRtcNetEQ_dtfm_aTbl8Khz[8] =
-{
- 27980, 26956, 25701, 24219,
- 19073, 16325, 13085, 9315
-};
-
-#ifdef NETEQ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_aTbl16Khz[8]=
-{
- 31548, 31281, 30951, 30556,
- 29144, 28361, 27409, 26258
-};
-#endif
-
-#ifdef NETEQ_32KHZ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_aTbl32Khz[8]=
-{
- 32462, 32394, 32311, 32210,
- 31849, 31647, 31400, 31098
-};
-#endif
-
-#ifdef NETEQ_48KHZ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_aTbl48Khz[8]=
-{
- 32632, 32602, 32564, 32520,
- 32359, 32268, 32157, 32022
-};
-#endif
-
-/*
- * Initialization values y[-1] = sin(w) = sin(2*pi*f/fs), for 8, 16, 32 and 48 kHz sample rate.
- * Table values in Q14.
- */
-
-const int16_t WebRtcNetEQ_dtfm_yInitTab8Khz[8] =
-{
- 8528, 9315, 10163, 11036,
- 13323, 14206,15021, 15708
-};
-
-#ifdef NETEQ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_yInitTab16Khz[8]=
-{
- 4429, 4879, 5380, 5918,
- 7490, 8207, 8979, 9801
-};
-#endif
-
-#ifdef NETEQ_32KHZ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_yInitTab32Khz[8]=
-{
- 2235, 2468, 2728, 3010,
- 3853, 4249, 4685, 5164
-};
-#endif
-
-#ifdef NETEQ_48KHZ_WIDEBAND
-const int16_t WebRtcNetEQ_dtfm_yInitTab48Khz[8]=
-{
- 1493, 1649, 1823, 2013,
- 2582, 2851, 3148, 3476
-};
-#endif
-
-/* Volume in dBm0 from 0 to -63, where 0 is the first table entry.
- Everything below -36 is discarded, wherefore the table stops at -36.
- Table entries are in Q14.
- */
-
-const int16_t WebRtcNetEQ_dtfm_dBm0[37] = { 16141, 14386, 12821, 11427, 10184, 9077, 8090,
- 7210, 6426, 5727, 5104, 4549, 4054, 3614,
- 3221, 2870, 2558, 2280, 2032, 1811, 1614,
- 1439, 1282, 1143, 1018, 908, 809, 721, 643,
- 573, 510, 455, 405, 361, 322, 287, 256 };
-
-/****************************************************************************
- * WebRtcNetEQ_DTMFGenerate(...)
- *
- * Generate 10 ms DTMF signal according to input parameters.
- *
- * Input:
- * - DTMFdecInst : DTMF instance
- * - value : DTMF event number (0-15)
- * - volume : Volume of generated signal (0-36)
- * Volume is given in negative dBm0, i.e., volume == 0
- * means 0 dBm0 while volume == 36 mean -36 dBm0.
- * - sampFreq : Sample rate in Hz
- *
- * Output:
- * - signal : Pointer to vector where DTMF signal is stored;
- * Vector must be at least sampFreq/100 samples long.
- * - DTMFdecInst : Updated DTMF instance
- *
- * Return value : >0 - Number of samples written to signal
- * : <0 - error
- */
-
-int16_t WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst, int16_t value,
- int16_t volume, int16_t *signal,
- uint16_t sampFreq, int16_t extFrameLen)
-{
- const int16_t *aTbl; /* pointer to a-coefficient table */
- const int16_t *yInitTable; /* pointer to initialization value table */
- int16_t a1 = 0; /* a-coefficient for first tone (low tone) */
- int16_t a2 = 0; /* a-coefficient for second tone (high tone) */
- int i;
- int frameLen; /* number of samples to generate */
- int lowIndex = 0; /* Default to avoid compiler warnings. */
- int highIndex = 4; /* Default to avoid compiler warnings. */
- int32_t tempVal;
- int16_t tempValLow;
- int16_t tempValHigh;
-
- /* Sanity check for volume */
- if ((volume < 0) || (volume > 36))
- {
- return DTMF_DEC_PARAMETER_ERROR;
- }
-
- /* Sanity check for extFrameLen */
- if (extFrameLen < -1)
- {
- return DTMF_DEC_PARAMETER_ERROR;
- }
-
- /* Select oscillator coefficient tables based on sample rate */
- if (sampFreq == 8000)
- {
- aTbl = WebRtcNetEQ_dtfm_aTbl8Khz;
- yInitTable = WebRtcNetEQ_dtfm_yInitTab8Khz;
- frameLen = 80;
-#ifdef NETEQ_WIDEBAND
- }
- else if (sampFreq == 16000)
- {
- aTbl = WebRtcNetEQ_dtfm_aTbl16Khz;
- yInitTable = WebRtcNetEQ_dtfm_yInitTab16Khz;
- frameLen = 160;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- }
- else if (sampFreq == 32000)
- {
- aTbl = WebRtcNetEQ_dtfm_aTbl32Khz;
- yInitTable = WebRtcNetEQ_dtfm_yInitTab32Khz;
- frameLen = 320;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- }
- else if (sampFreq == 48000)
- {
- aTbl = WebRtcNetEQ_dtfm_aTbl48Khz;
- yInitTable = WebRtcNetEQ_dtfm_yInitTab48Khz;
- frameLen = 480;
-#endif
- }
- else
- {
- /* unsupported sample rate */
- return DTMF_GEN_UNKNOWN_SAMP_FREQ;
- }
-
- if (extFrameLen >= 0)
- {
- frameLen = extFrameLen;
- }
-
- /* select low frequency based on event value */
- switch (value)
- {
- case 1:
- case 2:
- case 3:
- case 12: /* first row on keypad */
- {
- lowIndex = 0; /* low frequency: 697 Hz */
- break;
- }
- case 4:
- case 5:
- case 6:
- case 13: /* second row on keypad */
- {
- lowIndex = 1; /* low frequency: 770 Hz */
- break;
- }
- case 7:
- case 8:
- case 9:
- case 14: /* third row on keypad */
- {
- lowIndex = 2; /* low frequency: 852 Hz */
- break;
- }
- case 0:
- case 10:
- case 11:
- case 15: /* fourth row on keypad */
- {
- lowIndex = 3; /* low frequency: 941 Hz */
- break;
- }
- default:
- {
- return DTMF_DEC_PARAMETER_ERROR;
- }
- } /* end switch */
-
- /* select high frequency based on event value */
- switch (value)
- {
- case 1:
- case 4:
- case 7:
- case 10: /* first column on keypad */
- {
- highIndex = 4; /* high frequency: 1209 Hz */
- break;
- }
- case 2:
- case 5:
- case 8:
- case 0: /* second column on keypad */
- {
- highIndex = 5;/* high frequency: 1336 Hz */
- break;
- }
- case 3:
- case 6:
- case 9:
- case 11: /* third column on keypad */
- {
- highIndex = 6;/* high frequency: 1477 Hz */
- break;
- }
- case 12:
- case 13:
- case 14:
- case 15: /* fourth column on keypad (special) */
- {
- highIndex = 7;/* high frequency: 1633 Hz */
- break;
- }
- } /* end switch */
-
- /* select coefficients based on results from switches above */
- a1 = aTbl[lowIndex]; /* coefficient for first (low) tone */
- a2 = aTbl[highIndex]; /* coefficient for second (high) tone */
-
- if (DTMFdecInst->reinit)
- {
- /* set initial values for the recursive model */
- DTMFdecInst->oldOutputLow[0] = yInitTable[lowIndex];
- DTMFdecInst->oldOutputLow[1] = 0;
- DTMFdecInst->oldOutputHigh[0] = yInitTable[highIndex];
- DTMFdecInst->oldOutputHigh[1] = 0;
-
- /* reset reinit flag */
- DTMFdecInst->reinit = 0;
- }
-
- /* generate signal sample by sample */
- for (i = 0; i < frameLen; i++)
- {
-
- /* Use rescursion formula y[n] = a*y[n-1] - y[n-2] */
- tempValLow
- = (int16_t) (((WEBRTC_SPL_MUL_16_16(a1, DTMFdecInst->oldOutputLow[1])
- + 8192) >> 14) - DTMFdecInst->oldOutputLow[0]);
- tempValHigh
- = (int16_t) (((WEBRTC_SPL_MUL_16_16(a2, DTMFdecInst->oldOutputHigh[1])
- + 8192) >> 14) - DTMFdecInst->oldOutputHigh[0]);
-
- /* Update recursion memory */
- DTMFdecInst->oldOutputLow[0] = DTMFdecInst->oldOutputLow[1];
- DTMFdecInst->oldOutputLow[1] = tempValLow;
- DTMFdecInst->oldOutputHigh[0] = DTMFdecInst->oldOutputHigh[1];
- DTMFdecInst->oldOutputHigh[1] = tempValHigh;
-
- /* scale high tone with 32768 (15 left shifts)
- and low tone with 23171 (3dB lower than high tone) */
- tempVal = WEBRTC_SPL_MUL_16_16(DTMF_AMP_LOW, tempValLow)
- + WEBRTC_SPL_LSHIFT_W32((int32_t)tempValHigh, 15);
-
- /* Norm the signal to Q14 (with proper rounding) */
- tempVal = (tempVal + 16384) >> 15;
-
- /* Scale the signal to correct dbM0 value */
- signal[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- (WEBRTC_SPL_MUL_16_16(tempVal, WebRtcNetEQ_dtfm_dBm0[volume])
- + 8192), 14); /* volume value is in Q14; use proper rounding */
- }
-
- return frameLen;
-
-}
-
-#endif /* NETEQ_ATEVENT_DECODE */
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h
deleted file mode 100644
index 5f4489940c2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the DTMF tone generator function.
- */
-
-#ifndef DTMF_TONEGEN_H
-#define DTMF_TONEGEN_H
-
-#include "typedefs.h"
-
-#include "neteq_defines.h"
-
-#ifdef NETEQ_ATEVENT_DECODE
-/* Must compile NetEQ with DTMF support to enable the functionality */
-
-#define DTMF_AMP_LOW 23171 /* 3 dB lower than the high frequency */
-
-/* The DTMF generator struct (part of DSP main struct DSPInst_t) */
-typedef struct dtmf_tone_inst_t_
-{
-
- int16_t reinit; /* non-zero if the oscillator model should
- be reinitialized for next event */
- int16_t oldOutputLow[2]; /* oscillator recursion history (low tone) */
- int16_t oldOutputHigh[2]; /* oscillator recursion history (high tone) */
-
- int lastDtmfSample; /* index to the first non-DTMF sample in the
- speech history, if non-negative */
-}dtmf_tone_inst_t;
-
-/****************************************************************************
- * WebRtcNetEQ_DTMFGenerate(...)
- *
- * Generate 10 ms DTMF signal according to input parameters.
- *
- * Input:
- * - DTMFdecInst : DTMF instance
- * - value : DTMF event number (0-15)
- * - volume : Volume of generated signal (0-36)
- * Volume is given in negative dBm0, i.e., volume == 0
- * means 0 dBm0 while volume == 36 mean -36 dBm0.
- * - sampFreq : Sample rate in Hz
- *
- * Output:
- * - signal : Pointer to vector where DTMF signal is stored;
- * Vector must be at least sampFreq/100 samples long.
- * - DTMFdecInst : Updated DTMF instance
- *
- * Return value : >0 - Number of samples written to signal
- * : <0 - Error
- */
-
-int16_t WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst,
- int16_t value,
- int16_t volume,
- int16_t *signal,
- uint16_t sampFreq,
- int16_t frameLen
-);
-
-#endif /* NETEQ_ATEVENT_DECODE */
-
-#endif /* DTMF_TONEGEN_H */
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.c
deleted file mode 100644
index 9959f9222d7..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.c
+++ /dev/null
@@ -1,1220 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This is the function to expand from the speech history, to produce concealment data or
- * increasing delay.
- */
-
-#include "dsp.h"
-
-#include <assert.h>
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-
-#define CHECK_NO_OF_CORRMAX 3
-#define DISTLEN 20
-#define LPCANALASYSLEN 160
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- (First part of first expand)
- int16_t pw16_bestCorrIndex 3 0 2
- int16_t pw16_bestCorr 3 3 5
- int16_t pw16_bestDistIndex 3 6 8
- int16_t pw16_bestDist 3 9 11
- int16_t pw16_corrVec 102*fs/8000 12 11+102*fs/8000
- func WebRtcNetEQ_Correlator 232 12+102*fs/8000 243+102*fs/8000
-
- (Second part of first expand)
- int32_t pw32_corr2 99*fs/8000+1 0 99*fs/8000
- int32_t pw32_autoCorr 2*7 0 13
- int16_t pw16_rc 6 14 19
-
- Signal combination:
- int16_t pw16_randVec 30+120*fs/8000 0 29+120*fs/8000
- int16_t pw16_scaledRandVec 125*fs/8000 30+120*fs/8000 29+245*fs/8000
- int16_t pw16_unvoicedVecSpace 10+125*fs/8000 30+245*fs/8000 39+370*fs/8000
-
- Total: 40+370*fs/8000 (size depends on UNVOICED_LPC_ORDER and BGN_LPC_ORDER)
- */
-
-#if ((BGN_LPC_ORDER > 10) || (UNVOICED_LPC_ORDER > 10)) && (defined SCRATCH)
-#error BGN_LPC_ORDER and/or BGN_LPC_ORDER are too large for current scratch memory allocation
-#endif
-
-#define SCRATCH_PW16_BEST_CORR_INDEX 0
-#define SCRATCH_PW16_BEST_CORR 3
-#define SCRATCH_PW16_BEST_DIST_INDEX 6
-#define SCRATCH_PW16_BEST_DIST 9
-#define SCRATCH_PW16_CORR_VEC 12
-#define SCRATCH_PW16_CORR2 0
-#define SCRATCH_PW32_AUTO_CORR 0
-#define SCRATCH_PW16_RC 14
-#define SCRATCH_PW16_RAND_VEC 0
-
-#if (defined(NETEQ_48KHZ_WIDEBAND))
-#define SCRATCH_NETEQDSP_CORRELATOR 624
-#define SCRATCH_PW16_SCALED_RAND_VEC 750
-#define SCRATCH_PW16_UNVOICED_VEC_SPACE 1500
-#elif (defined(NETEQ_32KHZ_WIDEBAND))
-#define SCRATCH_NETEQDSP_CORRELATOR 420
-#define SCRATCH_PW16_SCALED_RAND_VEC 510
-#define SCRATCH_PW16_UNVOICED_VEC_SPACE 1010
-#elif (defined(NETEQ_WIDEBAND))
-#define SCRATCH_NETEQDSP_CORRELATOR 216
-#define SCRATCH_PW16_SCALED_RAND_VEC 270
-#define SCRATCH_PW16_UNVOICED_VEC_SPACE 520
-#else /* NB */
-#define SCRATCH_NETEQDSP_CORRELATOR 114
-#define SCRATCH_PW16_SCALED_RAND_VEC 150
-#define SCRATCH_PW16_UNVOICED_VEC_SPACE 275
-#endif
-
-/****************************************************************************
- * WebRtcNetEQ_Expand(...)
- *
- * This function produces one "chunk" of expansion data (PLC audio). The
- * length of the produced audio depends on the speech history.
- *
- * Input:
- * - inst : DSP instance
- * - scratchPtr : Pointer to scratch vector
- * - outdata : Pointer to a memory space where the output data
- * should be stored
- * - BGNonly : If non-zero, "expand" will only produce background noise.
- * - pw16_len : Desired number of samples (only for BGN mode).
- *
- * Output:
- * - inst : Updated instance
- * - pw16_len : Number of samples that were output from NetEq
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_Expand(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly)
-{
-
- int16_t fs_mult;
- ExpandInst_t *ExpandState = &(inst->ExpandInst);
- BGNInst_t *BGNState = &(inst->BGNInst);
- int i;
-#ifdef SCRATCH
- int16_t *pw16_randVec = pw16_scratchPtr + SCRATCH_PW16_RAND_VEC;
- int16_t *pw16_scaledRandVec = pw16_scratchPtr + SCRATCH_PW16_SCALED_RAND_VEC;
- int16_t *pw16_unvoicedVecSpace = pw16_scratchPtr + SCRATCH_PW16_UNVOICED_VEC_SPACE;
-#else
- int16_t pw16_randVec[FSMULT * 120 + 30]; /* 150 for NB and 270 for WB */
- int16_t pw16_scaledRandVec[FSMULT * 125]; /* 125 for NB and 250 for WB */
- int16_t pw16_unvoicedVecSpace[BGN_LPC_ORDER + FSMULT * 125];
-#endif
- /* 125 for NB and 250 for WB etc. Reuse pw16_outData[] for this vector */
- int16_t *pw16_voicedVecStorage = pw16_outData;
- int16_t *pw16_voicedVec = &pw16_voicedVecStorage[ExpandState->w16_overlap];
- int16_t *pw16_unvoicedVec = pw16_unvoicedVecSpace + UNVOICED_LPC_ORDER;
- int16_t *pw16_cngVec = pw16_unvoicedVecSpace + BGN_LPC_ORDER;
- int16_t w16_expVecsLen, w16_lag = 0, w16_expVecPos;
- int16_t w16_randLen;
- int16_t w16_vfractionChange; /* in Q14 */
- int16_t w16_winMute = 0, w16_winMuteInc = 0, w16_winUnMute = 0, w16_winUnMuteInc = 0;
- int32_t w32_tmp;
- int16_t w16_tmp, w16_tmp2;
- int16_t stability;
- enum BGNMode bgnMode = inst->BGNInst.bgnMode;
-
- /* Pre-calculate common multiplications with fs_mult */
- int16_t fsMult4;
- int16_t fsMult20;
- int16_t fsMult120;
- int16_t fsMultDistLen;
- int16_t fsMultLPCAnalasysLen;
-
-#ifdef NETEQ_STEREO
- MasterSlaveInfo *msInfo = inst->msInfo;
-#endif
-
- /* fs is uint16_t (to hold fs=48000) */
- fs_mult = WebRtcNetEQ_CalcFsMult(inst->fs); /* calculate fs/8000 */
-
- /* Pre-calculate common multiplications with fs_mult */
- fsMult4 = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, 4);
- fsMult20 = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, 20);
- fsMult120 = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, 120);
- fsMultDistLen = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, DISTLEN);
- fsMultLPCAnalasysLen = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, LPCANALASYSLEN);
-
- /*
- * Perform all the initial setup if it's the first expansion.
- * If background noise (BGN) only, this setup is not needed.
- */
- if (ExpandState->w16_consecExp == 0 && !BGNonly)
- {
- /* Setup more variables */
-#ifdef SCRATCH
- int32_t *pw32_autoCorr = (int32_t*) (pw16_scratchPtr
- + SCRATCH_PW32_AUTO_CORR);
- int16_t *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
- int16_t *pw16_bestCorrIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR_INDEX;
- int16_t *pw16_bestCorr = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR;
- int16_t *pw16_bestDistIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST_INDEX;
- int16_t *pw16_bestDist = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST;
- int16_t *pw16_corrVec = pw16_scratchPtr + SCRATCH_PW16_CORR_VEC;
- int32_t *pw32_corr2 = (int32_t*) (pw16_scratchPtr + SCRATCH_PW16_CORR2);
-#else
- int32_t pw32_autoCorr[UNVOICED_LPC_ORDER+1];
- int16_t pw16_rc[UNVOICED_LPC_ORDER];
- int16_t pw16_corrVec[FSMULT*102]; /* 102 for NB */
- int16_t pw16_bestCorrIndex[CHECK_NO_OF_CORRMAX];
- int16_t pw16_bestCorr[CHECK_NO_OF_CORRMAX];
- int16_t pw16_bestDistIndex[CHECK_NO_OF_CORRMAX];
- int16_t pw16_bestDist[CHECK_NO_OF_CORRMAX];
- int32_t pw32_corr2[(99*FSMULT)+1];
-#endif
- int32_t pw32_bestDist[CHECK_NO_OF_CORRMAX];
- int16_t w16_ind = 0;
- int16_t w16_corrVecLen;
- int16_t w16_corrScale;
- int16_t w16_distScale;
- int16_t w16_indMin, w16_indMax;
- int16_t w16_len;
- int32_t w32_en1, w32_en2, w32_cc;
- int16_t w16_en1Scale, w16_en2Scale;
- int16_t w16_en1, w16_en2;
- int32_t w32_en1_mul_en2;
- int16_t w16_sqrt_en1en2;
- int16_t w16_ccShiftL;
- int16_t w16_bestcorr; /* Correlation in Q14 */
- int16_t *pw16_vec1, *pw16_vec2;
- int16_t w16_factor;
- int16_t w16_DistLag, w16_CorrLag, w16_diffLag;
- int16_t w16_energyLen;
- int16_t w16_slope;
- int16_t w16_startInd;
- int16_t w16_noOfcorr2;
- int16_t w16_scale;
-
- /* Initialize some variables */
- ExpandState->w16_lagsDirection = 1;
- ExpandState->w16_lagsPosition = -1;
- ExpandState->w16_expandMuteFactor = 16384; /* Start from 1.0 (Q14) */
- BGNState->w16_mutefactor = 0; /* Start with 0 gain for BGN (value in Q14) */
- inst->w16_seedInc = 1;
-
-#ifdef NETEQ_STEREO
- /* Sanity for msInfo */
- if (msInfo == NULL)
- {
- /* this should not happen here */
- return MASTER_SLAVE_ERROR;
- }
-
- /*
- * Do not calculate correlations for slave instance(s)
- * unless lag info from master is corrupt
- */
- if ((msInfo->msMode != NETEQ_SLAVE)
- || ((msInfo->distLag <= 0) || (msInfo->corrLag <= 0)))
- {
-#endif
- /* Calculate correlation vector in downsampled domain (4 kHz sample rate) */
- w16_corrVecLen = WebRtcNetEQ_Correlator(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQDSP_CORRELATOR,
-#endif
- inst->pw16_speechHistory, inst->w16_speechHistoryLen, pw16_corrVec,
- &w16_corrScale);
-
- /* Find peaks in correlation vector using parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corrVec, w16_corrVecLen, CHECK_NO_OF_CORRMAX, fs_mult,
- pw16_bestCorrIndex, pw16_bestCorr);
-
- /*
- * Adjust peak locations; cross-correlation lags start at 2.5 ms
- * (20*fs_mult samples)
- */
- pw16_bestCorrIndex[0] += fsMult20;
- pw16_bestCorrIndex[1] += fsMult20;
- pw16_bestCorrIndex[2] += fsMult20;
-
- /* Calculate distortion around the 3 (CHECK_NO_OF_CORRMAX) best lags */
- w16_distScale = 0;
- for (i = 0; i < CHECK_NO_OF_CORRMAX; i++)
- {
- w16_tmp = fsMult20;
- w16_tmp2 = pw16_bestCorrIndex[i] - fsMult4;
- w16_indMin = WEBRTC_SPL_MAX(w16_tmp, w16_tmp2);
- w16_tmp = fsMult120 - 1;
- w16_tmp2 = pw16_bestCorrIndex[i] + fsMult4;
- w16_indMax = WEBRTC_SPL_MIN(w16_tmp, w16_tmp2);
-
- pw16_bestDistIndex[i] = WebRtcNetEQ_MinDistortion(
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - fsMultDistLen]),
- w16_indMin, w16_indMax, fsMultDistLen, &pw32_bestDist[i]);
-
- w16_distScale
- = WEBRTC_SPL_MAX(16 - WebRtcSpl_NormW32(pw32_bestDist[i]), w16_distScale);
-
- }
-
- /* Shift the distortion values to fit in int16_t */
- WebRtcSpl_VectorBitShiftW32ToW16(pw16_bestDist, CHECK_NO_OF_CORRMAX, pw32_bestDist,
- w16_distScale);
-
- /*
- * Find index of maximum criteria, where crit[i] = bestCorr[i])/(bestDist[i])
- * Do this by a cross multiplication.
- */
-
- w32_en1 = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[0],pw16_bestDist[1]);
- w32_en2 = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[1],pw16_bestDist[0]);
- if (w32_en1 >= w32_en2)
- {
- /* 0 wins over 1 */
- w32_en1
- = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[0], pw16_bestDist[2]);
- w32_en2
- = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[2], pw16_bestDist[0]);
- if (w32_en1 >= w32_en2)
- {
- /* 0 wins over 2 */
- w16_ind = 0;
- }
- else
- {
- /* 2 wins over 0 */
- w16_ind = 2;
- }
- }
- else
- {
- /* 1 wins over 0 */
- w32_en1
- = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[1],pw16_bestDist[2]);
- w32_en2
- = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[2],pw16_bestDist[1]);
- if ((int32_t) w32_en1 >= (int32_t) w32_en2)
- {
- /* 1 wins over 2 */
- w16_ind = 1;
- }
- else
- {
- /* 2 wins over 1 */
- w16_ind = 2;
- }
- }
-
-#ifdef NETEQ_STEREO
- }
-
- /* Store DistLag and CorrLag of the position with highest criteria */
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO)
- || ((msInfo->msMode == NETEQ_SLAVE) && (msInfo->distLag <= 0 || msInfo->corrLag
- <= 0)))
- {
- /* lags not provided externally */
- w16_DistLag = pw16_bestDistIndex[w16_ind];
- w16_CorrLag = pw16_bestCorrIndex[w16_ind];
- if (msInfo->msMode == NETEQ_MASTER)
- {
- msInfo->distLag = w16_DistLag;
- msInfo->corrLag = w16_CorrLag;
- }
- }
- else if (msInfo->msMode == NETEQ_SLAVE)
- {
- /* lags provided externally (from master) */
- w16_DistLag = msInfo->distLag;
- w16_CorrLag = msInfo->corrLag;
-
- /* sanity for lag values */
- if ((w16_DistLag <= 0) || (w16_CorrLag <= 0))
- {
- return MASTER_SLAVE_ERROR;
- }
- }
- else
- {
- /* Invalid mode */
- return MASTER_SLAVE_ERROR;
- }
-#else /* not NETEQ_STEREO */
- w16_DistLag = pw16_bestDistIndex[w16_ind];
- w16_CorrLag = pw16_bestCorrIndex[w16_ind];
-#endif
-
- ExpandState->w16_maxLag = WEBRTC_SPL_MAX(w16_DistLag, w16_CorrLag);
-
- /* Calculate the exact best correlation (in the range within CorrLag-DistLag) */
- w16_len = w16_DistLag + 10;
- w16_len = WEBRTC_SPL_MIN(w16_len, fsMult120);
- w16_len = WEBRTC_SPL_MAX(w16_len, 60 * fs_mult);
-
- w16_startInd = WEBRTC_SPL_MIN(w16_DistLag, w16_CorrLag);
- w16_noOfcorr2 = WEBRTC_SPL_ABS_W16((w16_DistLag-w16_CorrLag)) + 1;
- /* w16_noOfcorr2 maximum value is 99*fs_mult + 1 */
-
- /* Calculate suitable scaling */
- w16_tmp
- = WebRtcSpl_MaxAbsValueW16(
- &inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_startInd
- - w16_noOfcorr2],
- (int16_t) (w16_len + w16_startInd + w16_noOfcorr2 - 1));
- w16_corrScale = ((31 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_tmp, w16_tmp)))
- + (31 - WebRtcSpl_NormW32(w16_len))) - 31;
- w16_corrScale = WEBRTC_SPL_MAX(0, w16_corrScale);
-
- /*
- * Perform the correlation, store in pw32_corr2
- */
-
- WebRtcNetEQ_CrossCorr(pw32_corr2,
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]),
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_startInd]),
- w16_len, w16_noOfcorr2, w16_corrScale, -1);
-
- /* Find maximizing index */
- w16_ind = WebRtcSpl_MaxIndexW32(pw32_corr2, w16_noOfcorr2);
- w32_cc = pw32_corr2[w16_ind]; /* this is maximum correlation */
- w16_ind = w16_ind + w16_startInd; /* correct index for start offset */
-
- /* Calculate energies */
- w32_en1 = WebRtcNetEQ_DotW16W16(
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]),
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]), w16_len,
- w16_corrScale);
- w32_en2 = WebRtcNetEQ_DotW16W16(
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_ind]),
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_ind]),
- w16_len, w16_corrScale);
-
- /* Calculate the correlation value w16_bestcorr */
- if ((w32_en1 > 0) && (w32_en2 > 0))
- {
- w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
- w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
- w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
- w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
- /* Make sure total scaling is even (to simplify scale factor after sqrt) */
- if ((w16_en1Scale + w16_en2Scale) & 1)
- {
- /* if sum is odd */
- w16_en1Scale += 1;
- }
- w16_en1 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
- w16_en2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
- w32_en1_mul_en2 = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
- w16_sqrt_en1en2 = (int16_t) WebRtcSpl_SqrtFloor(w32_en1_mul_en2);
-
- /* Calculate cc/sqrt(en1*en2) in Q14 */
- w16_ccShiftL = 14 - ((w16_en1Scale + w16_en2Scale) >> 1);
- w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_ccShiftL);
- w16_bestcorr = (int16_t) WebRtcSpl_DivW32W16(w32_cc, w16_sqrt_en1en2);
- w16_bestcorr = WEBRTC_SPL_MIN(16384, w16_bestcorr); /* set maximum to 1.0 */
-
- }
- else
- {
- /* if either en1 or en2 is zero */
- w16_bestcorr = 0;
- }
-
- /*
- * Extract the two vectors, pw16_expVecs[0][] and pw16_expVecs[1][],
- * from the SpeechHistory[]
- */
- w16_expVecsLen = ExpandState->w16_maxLag + ExpandState->w16_overlap;
- pw16_vec1 = &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_expVecsLen]);
- pw16_vec2 = pw16_vec1 - w16_DistLag;
- /* Normalize the second vector to the same energy as the first */
- w32_en1 = WebRtcNetEQ_DotW16W16(pw16_vec1, pw16_vec1, w16_expVecsLen, w16_corrScale);
- w32_en2 = WebRtcNetEQ_DotW16W16(pw16_vec2, pw16_vec2, w16_expVecsLen, w16_corrScale);
-
- /*
- * Confirm that energy factor sqrt(w32_en1/w32_en2) is within difference 0.5 - 2.0
- * w32_en1/w32_en2 within 0.25 - 4
- */
- if (((w32_en1 >> 2) < w32_en2) && ((w32_en1) > (w32_en2 >> 2)))
- {
-
- /* Energy constraint fulfilled => use both vectors and scale them accordingly */
- w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
- w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
- w16_en1Scale = w16_en2Scale - 13;
-
- /* calculate w32_en1/w32_en2 in Q13 */
- w32_en1_mul_en2 = WebRtcSpl_DivW32W16(
- WEBRTC_SPL_SHIFT_W32(w32_en1, -w16_en1Scale),
- (int16_t) (WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale)));
-
- /* calculate factor in Q13 (sqrt of en1/en2 in Q26) */
- w16_factor = (int16_t) WebRtcSpl_SqrtFloor(
- WEBRTC_SPL_LSHIFT_W32(w32_en1_mul_en2, 13));
-
- /* Copy the two vectors and give them the same energy */
-
- WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[0], pw16_vec1, w16_expVecsLen);
- WebRtcSpl_AffineTransformVector(ExpandState->pw16_expVecs[1], pw16_vec2,
- w16_factor, 4096, 13, w16_expVecsLen);
-
- }
- else
- {
- /* Energy change constraint not fulfilled => only use last vector */
-
- WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[0], pw16_vec1, w16_expVecsLen);
- WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[1], ExpandState->pw16_expVecs[0],
- w16_expVecsLen);
-
- /* Set the w16_factor since it is used by muting slope */
- if (((w32_en1 >> 2) < w32_en2) || (w32_en2 == 0))
- {
- w16_factor = 4096; /* 0.5 in Q13*/
- }
- else
- {
- w16_factor = 16384; /* 2.0 in Q13*/
- }
- }
-
- /* Set the 3 lag values */
- w16_diffLag = w16_DistLag - w16_CorrLag;
- if (w16_diffLag == 0)
- {
- /* DistLag and CorrLag are equal */
- ExpandState->w16_lags[0] = w16_DistLag;
- ExpandState->w16_lags[1] = w16_DistLag;
- ExpandState->w16_lags[2] = w16_DistLag;
- }
- else
- {
- /* DistLag and CorrLag are not equal; use different combinations of the two */
- ExpandState->w16_lags[0] = w16_DistLag; /* DistLag only */
- ExpandState->w16_lags[1] = ((w16_DistLag + w16_CorrLag) >> 1); /* 50/50 */
- /* Third lag, move one half-step towards CorrLag (in both cases) */
- if (w16_diffLag > 0)
- {
- ExpandState->w16_lags[2] = (w16_DistLag + w16_CorrLag - 1) >> 1;
- }
- else
- {
- ExpandState->w16_lags[2] = (w16_DistLag + w16_CorrLag + 1) >> 1;
- }
- }
-
- /*************************************************
- * Calculate the LPC and the gain of the filters *
- *************************************************/
-
- /* Calculate scale value needed for autocorrelation */
- w16_tmp = WebRtcSpl_MaxAbsValueW16(
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - fsMultLPCAnalasysLen]),
- fsMultLPCAnalasysLen);
-
- w16_tmp = 16 - WebRtcSpl_NormW32(w16_tmp);
- w16_tmp = WEBRTC_SPL_MIN(w16_tmp,0);
- w16_tmp = (w16_tmp << 1) + 7;
- w16_tmp = WEBRTC_SPL_MAX(w16_tmp,0);
-
- /* set w16_ind to simplify the following expressions */
- w16_ind = inst->w16_speechHistoryLen - fsMultLPCAnalasysLen - UNVOICED_LPC_ORDER;
-
- /* store first UNVOICED_LPC_ORDER samples in pw16_rc */
-
- WEBRTC_SPL_MEMCPY_W16(pw16_rc, &inst->pw16_speechHistory[w16_ind], UNVOICED_LPC_ORDER);
-
- /* set first samples to zero */
- WebRtcSpl_MemSetW16(&inst->pw16_speechHistory[w16_ind], 0, UNVOICED_LPC_ORDER);
-
- /* Calculate UNVOICED_LPC_ORDER+1 lags of the ACF */
-
- WebRtcNetEQ_CrossCorr(
- pw32_autoCorr, &(inst->pw16_speechHistory[w16_ind + UNVOICED_LPC_ORDER]),
- &(inst->pw16_speechHistory[w16_ind + UNVOICED_LPC_ORDER]), fsMultLPCAnalasysLen,
- UNVOICED_LPC_ORDER + 1, w16_tmp, -1);
-
- /* Recover the stored samples from pw16_rc */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->pw16_speechHistory[w16_ind], pw16_rc, UNVOICED_LPC_ORDER);
-
- if (pw32_autoCorr[0] > 0)
- { /* check that variance is positive */
-
- /* estimate AR filter parameters using Levinson-Durbin algorithm
- (UNVOICED_LPC_ORDER+1 filter coefficients) */
- stability = WebRtcSpl_LevinsonDurbin(pw32_autoCorr, ExpandState->pw16_arFilter,
- pw16_rc, UNVOICED_LPC_ORDER);
-
- /* Only update BGN if filter is stable */
- if (stability != 1)
- {
- /* Set first coefficient to 4096 (1.0 in Q12)*/
- ExpandState->pw16_arFilter[0] = 4096;
- /* Set remaining UNVOICED_LPC_ORDER coefficients to zero */
- WebRtcSpl_MemSetW16(ExpandState->pw16_arFilter + 1, 0, UNVOICED_LPC_ORDER);
- }
-
- }
-
- if (w16_DistLag < 40)
- {
- w16_energyLen = 2 * w16_DistLag;
- }
- else
- {
- w16_energyLen = w16_DistLag;
- }
- w16_randLen = w16_energyLen + 30; /* Startup part */
-
- /* Extract a noise segment */
- if (w16_randLen <= RANDVEC_NO_OF_SAMPLES)
- {
- WEBRTC_SPL_MEMCPY_W16(pw16_randVec,
- (int16_t*) WebRtcNetEQ_kRandnTbl, w16_randLen);
- }
- else
- { /* only applies to SWB where length could be larger than 256 */
-#if FSMULT >= 2 /* Makes pw16_randVec longer than RANDVEC_NO_OF_SAMPLES. */
- WEBRTC_SPL_MEMCPY_W16(pw16_randVec, (int16_t*) WebRtcNetEQ_kRandnTbl,
- RANDVEC_NO_OF_SAMPLES);
- inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
- assert(w16_randLen <= FSMULT * 120 + 30);
- WebRtcNetEQ_RandomVec(&inst->uw16_seed, &pw16_randVec[RANDVEC_NO_OF_SAMPLES],
- (int16_t) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
-#else
- assert(0);
-#endif
- }
-
- /* Set up state vector and calculate scale factor for unvoiced filtering */
-
- WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_arState,
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - UNVOICED_LPC_ORDER]),
- UNVOICED_LPC_ORDER);
- WEBRTC_SPL_MEMCPY_W16(pw16_unvoicedVec - UNVOICED_LPC_ORDER,
- &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - 128 - UNVOICED_LPC_ORDER]),
- UNVOICED_LPC_ORDER);
- WebRtcSpl_FilterMAFastQ12(&inst->pw16_speechHistory[inst->w16_speechHistoryLen - 128],
- pw16_unvoicedVec, ExpandState->pw16_arFilter, UNVOICED_LPC_ORDER + 1, 128);
- if (WebRtcSpl_MaxAbsValueW16(pw16_unvoicedVec, 128) > 4000)
- {
- w16_scale = 4;
- }
- else
- {
- w16_scale = 0;
- }
- w32_tmp = WebRtcNetEQ_DotW16W16(pw16_unvoicedVec, pw16_unvoicedVec, 128, w16_scale);
-
- /* Normalize w32_tmp to 28 or 29 bits to preserve sqrt() accuracy */
- w16_tmp = WebRtcSpl_NormW32(w32_tmp) - 3;
- w16_tmp += ((w16_tmp & 0x1) ^ 0x1); /* Make sure we do an odd number of shifts since we
- from earlier have 7 shifts from dividing with 128.*/
- w32_tmp = WEBRTC_SPL_SHIFT_W32(w32_tmp, w16_tmp);
- w32_tmp = WebRtcSpl_SqrtFloor(w32_tmp);
- ExpandState->w16_arGainScale = 13 + ((w16_tmp + 7 - w16_scale) >> 1);
- ExpandState->w16_arGain = (int16_t) w32_tmp;
-
- /********************************************************************
- * Calculate vfraction from bestcorr *
- * if (bestcorr>0.480665) *
- * vfraction = ((bestcorr-0.4)/(1-0.4)).^2 *
- * else vfraction = 0 *
- * *
- * approximation (coefficients in Q12): *
- * if (x>0.480665) (y(x)<0.3) *
- * y(x) = -1.264421 + 4.8659148*x - 4.0092827*x^2 + 1.4100529*x^3 *
- * else y(x) = 0; *
- ********************************************************************/
-
- if (w16_bestcorr > 7875)
- {
- /* if x>0.480665 */
- int16_t w16_x1, w16_x2, w16_x3;
- w16_x1 = w16_bestcorr;
- w32_tmp = WEBRTC_SPL_MUL_16_16((int32_t) w16_x1, w16_x1);
- w16_x2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
- w32_tmp = WEBRTC_SPL_MUL_16_16(w16_x1, w16_x2);
- w16_x3 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
- w32_tmp
- = (int32_t) WEBRTC_SPL_LSHIFT_W32((int32_t) WebRtcNetEQ_kMixFractionFuncTbl[0], 14);
- w32_tmp
- += (int32_t) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[1], w16_x1);
- w32_tmp
- += (int32_t) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[2], w16_x2);
- w32_tmp
- += (int32_t) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[3], w16_x3);
- ExpandState->w16_vFraction = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 12);
- ExpandState->w16_vFraction = WEBRTC_SPL_MIN(ExpandState->w16_vFraction, 16384);
- ExpandState->w16_vFraction = WEBRTC_SPL_MAX(ExpandState->w16_vFraction, 0);
- }
- else
- {
- ExpandState->w16_vFraction = 0;
- }
-
- /***********************************************************************
- * Calculate muting slope, reuse value from earlier scaling of ExpVecs *
- ***********************************************************************/
- w16_slope = w16_factor;
-
- if (w16_slope > 12288)
- {
- /* w16_slope > 1.5 ? */
- /* Calculate (1-(1/slope))/w16_DistLag = (slope-1)/(w16_DistLag*slope) */
- w32_tmp = w16_slope - 8192;
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 12); /* Value in Q25 (13+12=25) */
- w16_tmp = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(w16_DistLag,
- w16_slope, 8); /* Value in Q5 (13-8=5) */
- w16_tmp = (int16_t) WebRtcSpl_DivW32W16(w32_tmp,
- w16_tmp); /* Res in Q20 (25-5=20) */
-
- if (w16_slope > 14746)
- { /* w16_slope > 1.8 ? */
- ExpandState->w16_muteSlope = (w16_tmp + 1) >> 1;
- }
- else
- {
- ExpandState->w16_muteSlope = (w16_tmp + 4) >> 3;
- }
- ExpandState->w16_onset = 1;
- }
- else if (ExpandState->w16_vFraction > 13107)
- {
- /* w16_vFraction > 0.8 ? */
- if (w16_slope > 8028)
- {
- /* w16_vFraction > 0.98 ? */
- ExpandState->w16_muteSlope = 0;
- }
- else
- {
- /* Calculate (1-slope)/w16_DistLag */
- w32_tmp = 8192 - w16_slope;
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 7); /* Value in Q20 (13+7=20) */
- ExpandState->w16_muteSlope = (int16_t) WebRtcSpl_DivW32W16(w32_tmp,
- w16_DistLag); /* Res in Q20 (20-0=20) */
- }
- ExpandState->w16_onset = 0;
- }
- else
- {
- /*
- * Use the minimum of 0.005 (0.9 on 50 samples in NB and the slope)
- * and ((1-slope)/w16_DistLag)
- */
- w32_tmp = 8192 - w16_slope;
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 7); /* Value in Q20 (13+7=20) */
- w32_tmp = WEBRTC_SPL_MAX(w32_tmp, 0);
- ExpandState->w16_muteSlope = (int16_t) WebRtcSpl_DivW32W16(w32_tmp,
- w16_DistLag); /* Res in Q20 (20-0=20) */
- w16_tmp = WebRtcNetEQ_k5243div[fs_mult]; /* 0.005/fs_mult = 5243/fs_mult */
- ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(w16_tmp, ExpandState->w16_muteSlope);
- ExpandState->w16_onset = 0;
- }
- }
- else
- {
- /* This is not the first Expansion, parameters are already estimated. */
-
- /* Extract a noise segment */
- if (BGNonly) /* If we should produce nothing but background noise */
- {
- if (*pw16_len > 0)
- {
- /*
- * Set length to input parameter length, but not more than length
- * of pw16_randVec
- */
- w16_lag = WEBRTC_SPL_MIN(*pw16_len, FSMULT * 120 + 30);
- }
- else
- {
- /* set length to 15 ms */
- w16_lag = fsMult120;
- }
- w16_randLen = w16_lag;
- }
- else
- {
- w16_randLen = ExpandState->w16_maxLag;
- }
-
- if (w16_randLen <= RANDVEC_NO_OF_SAMPLES)
- {
- inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
- WebRtcNetEQ_RandomVec(&inst->uw16_seed, pw16_randVec, w16_randLen,
- inst->w16_seedInc);
- }
- else
- { /* only applies to SWB where length could be larger than 256 */
-#if FSMULT >= 2 /* Makes pw16_randVec longer than RANDVEC_NO_OF_SAMPLES. */
- inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
- WebRtcNetEQ_RandomVec(&inst->uw16_seed, pw16_randVec, RANDVEC_NO_OF_SAMPLES,
- inst->w16_seedInc);
- inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
- assert(w16_randLen <= FSMULT * 120 + 30);
- WebRtcNetEQ_RandomVec(&inst->uw16_seed, &pw16_randVec[RANDVEC_NO_OF_SAMPLES],
- (int16_t) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
-#else
- assert(0);
-#endif
- }
- } /* end if(first expand or BGNonly) ... else ... */
-
- if (!BGNonly) /* Voiced and unvoiced parts not used if generating BGN only */
- {
-
- /*************************************************
- * Generate signal *
- *************************************************/
-
- /*
- * Voiced part
- */
-
- /* Linearly mute the use_vfraction value from 1 to vfraction */
- if (ExpandState->w16_consecExp == 0)
- {
- ExpandState->w16_currentVFraction = 16384; /* 1.0 in Q14 */
- }
-
- ExpandState->w16_lagsPosition = ExpandState->w16_lagsPosition
- + ExpandState->w16_lagsDirection;
-
- /* Change direction if needed */
- if (ExpandState->w16_lagsPosition == 0)
- {
- ExpandState->w16_lagsDirection = 1;
- }
- if (ExpandState->w16_lagsPosition == 2)
- {
- ExpandState->w16_lagsDirection = -1;
- }
-
- /* Generate a weighted vector with the selected lag */
- w16_expVecsLen = ExpandState->w16_maxLag + ExpandState->w16_overlap;
- w16_lag = ExpandState->w16_lags[ExpandState->w16_lagsPosition];
- /* Copy lag+overlap data */
- w16_expVecPos = w16_expVecsLen - w16_lag - ExpandState->w16_overlap;
- w16_tmp = w16_lag + ExpandState->w16_overlap;
- if (ExpandState->w16_lagsPosition == 0)
- {
- WEBRTC_SPL_MEMCPY_W16(pw16_voicedVecStorage,
- &(ExpandState->pw16_expVecs[0][w16_expVecPos]), w16_tmp);
- }
- else if (ExpandState->w16_lagsPosition == 1)
- {
- WebRtcSpl_ScaleAndAddVectorsWithRound(&ExpandState->pw16_expVecs[0][w16_expVecPos], 3,
- &ExpandState->pw16_expVecs[1][w16_expVecPos], 1, 2, pw16_voicedVecStorage,
- w16_tmp);
-
- }
- else if (ExpandState->w16_lagsPosition == 2)
- {
- WebRtcSpl_ScaleAndAddVectorsWithRound(&ExpandState->pw16_expVecs[0][w16_expVecPos], 1,
- &ExpandState->pw16_expVecs[1][w16_expVecPos], 1, 1, pw16_voicedVecStorage,
- w16_tmp);
- }
-
- if (inst->fs == 8000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_8KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_8KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_8KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC;
-#ifdef NETEQ_WIDEBAND
- }
- else if (inst->fs == 16000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_16KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_16KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_16KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC;
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- }
- else if (inst->fs == 32000)
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_32KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_32KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_32KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC;
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- }
- else /* if (inst->fs==48000) */
- {
- /* Windowing in Q15 */
- w16_winMute = NETEQ_OVERLAP_WINMUTE_48KHZ_START;
- w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_48KHZ_INC;
- w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_48KHZ_START;
- w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC;
-#endif
- }
-
- /* Smooth the expanded if it has not been muted to or vfraction is larger than 0.5 */
- if ((ExpandState->w16_expandMuteFactor > 819) && (ExpandState->w16_currentVFraction
- > 8192))
- {
- for (i = 0; i < ExpandState->w16_overlap; i++)
- {
- /* Do overlap add between new vector and overlap */
- ExpandState->pw16_overlapVec[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- WEBRTC_SPL_MUL_16_16(ExpandState->pw16_overlapVec[i], w16_winMute) +
- WEBRTC_SPL_MUL_16_16(
- WEBRTC_SPL_MUL_16_16_RSFT(ExpandState->w16_expandMuteFactor,
- pw16_voicedVecStorage[i], 14), w16_winUnMute) + 16384, 15);
- w16_winMute += w16_winMuteInc;
- w16_winUnMute += w16_winUnMuteInc;
- }
- }
- else if (ExpandState->w16_expandMuteFactor == 0
-#ifdef NETEQ_STEREO
- && msInfo->msMode == NETEQ_MONO /* only if mono mode is selected */
-#endif
- )
- {
- /* if ExpandState->w16_expandMuteFactor = 0 => all is CNG component
- set the output length to 15ms (for best CNG production) */
- w16_tmp = fsMult120;
- ExpandState->w16_maxLag = w16_tmp;
- ExpandState->w16_lags[0] = w16_tmp;
- ExpandState->w16_lags[1] = w16_tmp;
- ExpandState->w16_lags[2] = w16_tmp;
- }
-
- /*
- * Unvoiced part
- */
-
- WEBRTC_SPL_MEMCPY_W16(pw16_unvoicedVec - UNVOICED_LPC_ORDER,
- ExpandState->pw16_arState,
- UNVOICED_LPC_ORDER);
- if (ExpandState->w16_arGainScale > 0)
- {
- w32_tmp = ((int32_t) 1) << (ExpandState->w16_arGainScale - 1);
- }
- else
- {
- w32_tmp = 0;
- }
-
- /* Note that shift value can be >16 which complicates things for some DSPs */
- WebRtcSpl_AffineTransformVector(pw16_scaledRandVec, pw16_randVec,
- ExpandState->w16_arGain, w32_tmp, ExpandState->w16_arGainScale, w16_lag);
-
- WebRtcSpl_FilterARFastQ12(pw16_scaledRandVec, pw16_unvoicedVec,
- ExpandState->pw16_arFilter, UNVOICED_LPC_ORDER + 1, w16_lag);
-
- WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_arState,
- &(pw16_unvoicedVec[w16_lag - UNVOICED_LPC_ORDER]),
- UNVOICED_LPC_ORDER);
-
- /*
- * Voiced + Unvoiced
- */
-
- /* For lag =
- <=31*fs_mult => go from 1 to 0 in about 8 ms
- (>=31..<=63)*fs_mult => go from 1 to 0 in about 16 ms
- >=64*fs_mult => go from 1 to 0 in about 32 ms
- */
- w16_tmp = (31 - WebRtcSpl_NormW32(ExpandState->w16_maxLag)) - 5; /* getbits(w16_maxLag) -5 */
- w16_vfractionChange = (int16_t) WEBRTC_SPL_RSHIFT_W32(256, w16_tmp);
- if (ExpandState->w16_stopMuting == 1)
- {
- w16_vfractionChange = 0;
- }
-
- /* Create combined signal (unmuted) by shifting in more and more of unvoiced part */
- w16_tmp = 8 - w16_tmp; /* getbits(w16_vfractionChange) */
- w16_tmp = (ExpandState->w16_currentVFraction - ExpandState->w16_vFraction) >> w16_tmp;
- w16_tmp = WEBRTC_SPL_MIN(w16_tmp, w16_lag);
- WebRtcNetEQ_MixVoiceUnvoice(pw16_outData, pw16_voicedVec, pw16_unvoicedVec,
- &ExpandState->w16_currentVFraction, w16_vfractionChange, w16_tmp);
-
- if (w16_tmp < w16_lag)
- {
- if (w16_vfractionChange != 0)
- {
- ExpandState->w16_currentVFraction = ExpandState->w16_vFraction;
- }
- w16_tmp2 = 16384 - ExpandState->w16_currentVFraction;
- WebRtcSpl_ScaleAndAddVectorsWithRound(pw16_voicedVec + w16_tmp,
- ExpandState->w16_currentVFraction, pw16_unvoicedVec + w16_tmp, w16_tmp2, 14,
- pw16_outData + w16_tmp, (int16_t) (w16_lag - w16_tmp));
- }
-
- /* Select muting factor */
- if (ExpandState->w16_consecExp == 3)
- {
- /* 0.95 on 50 samples in NB (0.0010/fs_mult in Q20) */
- ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(ExpandState->w16_muteSlope,
- WebRtcNetEQ_k1049div[fs_mult]);
- }
- if (ExpandState->w16_consecExp == 7)
- {
- /* 0.90 on 50 samples in NB (0.0020/fs_mult in Q20) */
- ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(ExpandState->w16_muteSlope,
- WebRtcNetEQ_k2097div[fs_mult]);
- }
-
- /* Mute segment according to slope value */
- if ((ExpandState->w16_consecExp != 0) || (ExpandState->w16_onset != 1))
- {
- /* Mute to the previous level, then continue with the muting */
- WebRtcSpl_AffineTransformVector(pw16_outData, pw16_outData,
- ExpandState->w16_expandMuteFactor, 8192, 14, w16_lag);
-
- if ((ExpandState->w16_stopMuting != 1))
- {
- WebRtcNetEQ_MuteSignal(pw16_outData, ExpandState->w16_muteSlope, w16_lag);
-
- w16_tmp = 16384 - (int16_t) ((WEBRTC_SPL_MUL_16_16(w16_lag,
- ExpandState->w16_muteSlope) + 8192) >> 6); /* 20-14 = 6 */
- w16_tmp = (int16_t) ((WEBRTC_SPL_MUL_16_16(w16_tmp,
- ExpandState->w16_expandMuteFactor) + 8192) >> 14);
-
- /* Guard against getting stuck with very small (but sometimes audible) gain */
- if ((ExpandState->w16_consecExp > 3) && (w16_tmp
- >= ExpandState->w16_expandMuteFactor))
- {
- ExpandState->w16_expandMuteFactor = 0;
- }
- else
- {
- ExpandState->w16_expandMuteFactor = w16_tmp;
- }
- }
- }
-
- } /* end if(!BGNonly) */
-
- /*
- * BGN
- */
-
- if (BGNState->w16_initialized == 1)
- {
- /* BGN parameters are initialized; use them */
-
- WEBRTC_SPL_MEMCPY_W16(pw16_cngVec - BGN_LPC_ORDER,
- BGNState->pw16_filterState,
- BGN_LPC_ORDER);
-
- if (BGNState->w16_scaleShift > 1)
- {
- w32_tmp = ((int32_t) 1) << (BGNState->w16_scaleShift - 1);
- }
- else
- {
- w32_tmp = 0;
- }
-
- /* Scale random vector to correct energy level */
- /* Note that shift value can be >16 which complicates things for some DSPs */
- WebRtcSpl_AffineTransformVector(pw16_scaledRandVec, pw16_randVec,
- BGNState->w16_scale, w32_tmp, BGNState->w16_scaleShift, w16_lag);
-
- WebRtcSpl_FilterARFastQ12(pw16_scaledRandVec, pw16_cngVec, BGNState->pw16_filter,
- BGN_LPC_ORDER + 1, w16_lag);
-
- WEBRTC_SPL_MEMCPY_W16(BGNState->pw16_filterState,
- &(pw16_cngVec[w16_lag-BGN_LPC_ORDER]),
- BGN_LPC_ORDER);
-
- /* Unmute the insertion of background noise */
-
- if (bgnMode == BGN_FADE && ExpandState->w16_consecExp >= FADE_BGN_TIME
- && BGNState->w16_mutefactor > 0)
- {
- /* fade BGN to zero */
- /* calculate muting slope, approx 2^18/fsHz */
- int16_t muteFactor;
- if (fs_mult == 1)
- {
- muteFactor = -32;
- }
- else if (fs_mult == 2)
- {
- muteFactor = -16;
- }
- else if (fs_mult == 4)
- {
- muteFactor = -8;
- }
- else
- {
- muteFactor = -5;
- }
- /* use UnmuteSignal function with negative slope */
- WebRtcNetEQ_UnmuteSignal(pw16_cngVec, &BGNState->w16_mutefactor, /* In Q14 */
- pw16_cngVec, muteFactor, /* In Q20 */
- w16_lag);
- }
- else if (BGNState->w16_mutefactor < 16384 && !BGNonly)
- {
- /* if (w16_mutefactor < 1) and not BGN only (since then we use no muting) */
-
- /*
- * If BGN_OFF, or if BNG_FADE has started fading,
- * mutefactor should not be increased.
- */
- if (ExpandState->w16_stopMuting != 1 && bgnMode != BGN_OFF && !(bgnMode
- == BGN_FADE && ExpandState->w16_consecExp >= FADE_BGN_TIME))
- {
- WebRtcNetEQ_UnmuteSignal(pw16_cngVec, &BGNState->w16_mutefactor, /* In Q14 */
- pw16_cngVec, ExpandState->w16_muteSlope, /* In Q20 */
- w16_lag);
- }
- else
- {
- /* BGN_ON and stop muting, or
- * BGN_OFF (mute factor is always 0), or
- * BGN_FADE has reached 0 */
- WebRtcSpl_AffineTransformVector(pw16_cngVec, pw16_cngVec,
- BGNState->w16_mutefactor, 8192, 14, w16_lag);
- }
- }
- }
- else
- {
- /* BGN parameters have not been initialized; use zero noise */
- WebRtcSpl_MemSetW16(pw16_cngVec, 0, w16_lag);
- }
-
- if (BGNonly)
- {
- /* Copy BGN to outdata */
- for (i = 0; i < w16_lag; i++)
- {
- pw16_outData[i] = pw16_cngVec[i];
- }
- }
- else
- {
- /* Add CNG vector to the Voiced + Unvoiced vectors */
- for (i = 0; i < w16_lag; i++)
- {
- pw16_outData[i] = pw16_outData[i] + pw16_cngVec[i];
- }
-
- /* increase call number */
- ExpandState->w16_consecExp = ExpandState->w16_consecExp + 1;
- if (ExpandState->w16_consecExp < 0) /* Guard against overflow */
- ExpandState->w16_consecExp = FADE_BGN_TIME; /* "Arbitrary" large num of expands */
- }
-
- inst->w16_mode = MODE_EXPAND;
- *pw16_len = w16_lag;
-
- /* Update in-call and post-call statistics */
- if (ExpandState->w16_stopMuting != 1 || BGNonly)
- {
- /*
- * Only do this if StopMuting != 1 or if explicitly BGNonly, otherwise Expand is
- * called from Merge or Normal and special measures must be taken.
- */
- inst->statInst.expandLength += (uint32_t) *pw16_len;
- if (ExpandState->w16_expandMuteFactor == 0 || BGNonly)
- {
- /* Only noise expansion */
- inst->statInst.expandedNoiseSamples += *pw16_len;
- /* Short-term activity statistics. */
- inst->activity_stats.expand_bgn_samples += *pw16_len;
- }
- else
- {
- /* Voice expand (note: not necessarily _voiced_) */
- inst->statInst.expandedVoiceSamples += *pw16_len;
- /* Short-term activity statistics. */
- inst->activity_stats.expand_normal_samples += *pw16_len;
- }
- }
-
- return 0;
-}
-
-/****************************************************************************
- * WebRtcNetEQ_GenerateBGN(...)
- *
- * This function generates and writes len samples of background noise to the
- * output vector. The Expand function will be called repeatedly until the
- * correct number of samples is produced.
- *
- * Input:
- * - inst : NetEq instance, i.e. the user that requests more
- * speech/audio data
- * - scratchPtr : Pointer to scratch vector
- * - len : Desired length of produced BGN.
- *
- *
- * Output:
- * - pw16_outData : Pointer to a memory space where the output data
- * should be stored
- *
- * Return value : >=0 - Number of noise samples produced and written
- * to output
- * -1 - Error
- */
-
-int WebRtcNetEQ_GenerateBGN(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_outData, int16_t len)
-{
-
- int16_t pos = 0;
- int16_t tempLen = len;
-
- while (tempLen > 0)
- {
- /* while we still need more noise samples, call Expand to obtain background noise */
- WebRtcNetEQ_Expand(inst,
-#ifdef SCRATCH
- pw16_scratchPtr,
-#endif
- &pw16_outData[pos], &tempLen, 1 /*BGNonly*/);
-
- pos += tempLen; /* we got this many samples */
- tempLen = len - pos; /* this is the number of samples we still need */
- }
-
- return pos;
-}
-
-#undef SCRATCH_PW16_BEST_CORR_INDEX
-#undef SCRATCH_PW16_BEST_CORR
-#undef SCRATCH_PW16_BEST_DIST_INDEX
-#undef SCRATCH_PW16_BEST_DIST
-#undef SCRATCH_PW16_CORR_VEC
-#undef SCRATCH_PW16_CORR2
-#undef SCRATCH_PW32_AUTO_CORR
-#undef SCRATCH_PW16_RC
-#undef SCRATCH_PW16_RAND_VEC
-#undef SCRATCH_NETEQDSP_CORRELATOR
-#undef SCRATCH_PW16_SCALED_RAND_VEC
-#undef SCRATCH_PW16_UNVOICED_VEC_SPACE
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc
index 73f2ef85a56..14a77982234 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
#include <assert.h>
#include <string.h> // memset
@@ -17,10 +17,10 @@
#include <limits> // numeric_limits<T>
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
namespace webrtc {
@@ -56,20 +56,9 @@ int Expand::Process(AudioMultiVector* output) {
// This is not the first expansion, parameters are already estimated.
// Extract a noise segment.
int16_t rand_length = max_lag_;
- // TODO(hlundin): This if-statement should not be needed. Should be just
- // as good to generate all of the vector in one call in either case.
- if (rand_length <= RandomVector::kRandomTableSize) {
- random_vector_->IncreaseSeedIncrement(2);
- random_vector_->Generate(rand_length, random_vector);
- } else {
- // This only applies to SWB where length could be larger than 256.
- assert(rand_length <= kMaxSampleRate / 8000 * 120 + 30);
- random_vector_->IncreaseSeedIncrement(2);
- random_vector_->Generate(RandomVector::kRandomTableSize, random_vector);
- random_vector_->IncreaseSeedIncrement(2);
- random_vector_->Generate(rand_length - RandomVector::kRandomTableSize,
- &random_vector[RandomVector::kRandomTableSize]);
- }
+ // This only applies to SWB where length could be larger than 256.
+ assert(rand_length <= kMaxSampleRate / 8000 * 120 + 30);
+ GenerateRandomVector(2, rand_length, random_vector);
}
@@ -262,82 +251,12 @@ int Expand::Process(AudioMultiVector* output) {
}
// Background noise part.
- // TODO(hlundin): Move to separate method? In BackgroundNoise class?
- if (background_noise_->initialized()) {
- // Use background noise parameters.
- memcpy(noise_vector - kNoiseLpcOrder,
- background_noise_->FilterState(channel_ix),
- sizeof(int16_t) * kNoiseLpcOrder);
-
- if (background_noise_->ScaleShift(channel_ix) > 1) {
- add_constant = 1 << (background_noise_->ScaleShift(channel_ix) - 1);
- } else {
- add_constant = 0;
- }
-
- // Scale random vector to correct energy level.
- WebRtcSpl_AffineTransformVector(
- scaled_random_vector, random_vector,
- background_noise_->Scale(channel_ix), add_constant,
- background_noise_->ScaleShift(channel_ix),
- static_cast<int>(current_lag));
-
- WebRtcSpl_FilterARFastQ12(scaled_random_vector, noise_vector,
- background_noise_->Filter(channel_ix),
- kNoiseLpcOrder + 1,
- static_cast<int>(current_lag));
-
- background_noise_->SetFilterState(
- channel_ix,
- &(noise_vector[current_lag - kNoiseLpcOrder]),
- kNoiseLpcOrder);
-
- // Unmute the background noise.
- int16_t bgn_mute_factor = background_noise_->MuteFactor(channel_ix);
- NetEqBackgroundNoiseMode bgn_mode = background_noise_->mode();
- if (bgn_mode == kBgnFade &&
- consecutive_expands_ >= kMaxConsecutiveExpands &&
- bgn_mute_factor > 0) {
- // Fade BGN to zero.
- // Calculate muting slope, approximately -2^18 / fs_hz.
- int16_t mute_slope;
- if (fs_hz_ == 8000) {
- mute_slope = -32;
- } else if (fs_hz_ == 16000) {
- mute_slope = -16;
- } else if (fs_hz_ == 32000) {
- mute_slope = -8;
- } else {
- mute_slope = -5;
- }
- // Use UnmuteSignal function with negative slope.
- // |bgn_mute_factor| is in Q14. |mute_slope| is in Q20.
- DspHelper::UnmuteSignal(noise_vector, current_lag, &bgn_mute_factor,
- mute_slope, noise_vector);
- } else if (bgn_mute_factor < 16384) {
- // If mode is kBgnOff, or if kBgnFade has started fading,
- // Use regular |mute_slope|.
- if (!stop_muting_ && bgn_mode != kBgnOff &&
- !(bgn_mode == kBgnFade &&
- consecutive_expands_ >= kMaxConsecutiveExpands)) {
- DspHelper::UnmuteSignal(noise_vector, static_cast<int>(current_lag),
- &bgn_mute_factor, parameters.mute_slope,
- noise_vector);
- } else {
- // kBgnOn and stop muting, or
- // kBgnOff (mute factor is always 0), or
- // kBgnFade has reached 0.
- WebRtcSpl_AffineTransformVector(noise_vector, noise_vector,
- bgn_mute_factor, 8192, 14,
- static_cast<int>(current_lag));
- }
- }
- // Update mute_factor in BackgroundNoise class.
- background_noise_->SetMuteFactor(channel_ix, bgn_mute_factor);
- } else {
- // BGN parameters have not been initialized; use zero noise.
- memset(noise_vector, 0, sizeof(int16_t) * current_lag);
- }
+ GenerateBackgroundNoise(random_vector,
+ channel_ix,
+ channel_parameters_[channel_ix].mute_slope,
+ TooManyExpands(),
+ current_lag,
+ unvoiced_array_memory);
// Add background noise to the combined voiced-unvoiced signal.
for (size_t i = 0; i < current_lag; i++) {
@@ -353,11 +272,8 @@ int Expand::Process(AudioMultiVector* output) {
}
// Increase call number and cap it.
- ++consecutive_expands_;
- if (consecutive_expands_ > kMaxConsecutiveExpands) {
- consecutive_expands_ = kMaxConsecutiveExpands;
- }
-
+ consecutive_expands_ = consecutive_expands_ >= kMaxConsecutiveExpands ?
+ kMaxConsecutiveExpands : consecutive_expands_ + 1;
return 0;
}
@@ -373,6 +289,24 @@ void Expand::SetParametersForMergeAfterExpand() {
stop_muting_ = true;
}
+void Expand::InitializeForAnExpandPeriod() {
+ lag_index_direction_ = 1;
+ current_lag_index_ = -1;
+ stop_muting_ = false;
+ random_vector_->set_seed_increment(1);
+ consecutive_expands_ = 0;
+ for (size_t ix = 0; ix < num_channels_; ++ix) {
+ channel_parameters_[ix].current_voice_mix_factor = 16384; // 1.0 in Q14.
+ channel_parameters_[ix].mute_factor = 16384; // 1.0 in Q14.
+ // Start with 0 gain for background noise.
+ background_noise_->SetMuteFactor(ix, 0);
+ }
+}
+
+bool Expand::TooManyExpands() {
+ return consecutive_expands_ >= kMaxConsecutiveExpands;
+}
+
void Expand::AnalyzeSignal(int16_t* random_vector) {
int32_t auto_correlation[kUnvoicedLpcOrder + 1];
int16_t reflection_coeff[kUnvoicedLpcOrder];
@@ -400,18 +334,8 @@ void Expand::AnalyzeSignal(int16_t* random_vector) {
const int16_t* audio_history =
&(*sync_buffer_)[0][sync_buffer_->Size() - signal_length];
- // Initialize some member variables.
- lag_index_direction_ = 1;
- current_lag_index_ = -1;
- stop_muting_ = false;
- random_vector_->set_seed_increment(1);
- consecutive_expands_ = 0;
- for (size_t ix = 0; ix < num_channels_; ++ix) {
- channel_parameters_[ix].current_voice_mix_factor = 16384; // 1.0 in Q14.
- channel_parameters_[ix].mute_factor = 16384; // 1.0 in Q14.
- // Start with 0 gain for background noise.
- background_noise_->SetMuteFactor(ix, 0);
- }
+ // Initialize.
+ InitializeForAnExpandPeriod();
// Calculate correlation in downsampled domain (4 kHz sample rate).
int16_t correlation_scale;
@@ -864,4 +788,117 @@ void Expand::UpdateLagIndex() {
}
}
+Expand* ExpandFactory::Create(BackgroundNoise* background_noise,
+ SyncBuffer* sync_buffer,
+ RandomVector* random_vector,
+ int fs,
+ size_t num_channels) const {
+ return new Expand(background_noise, sync_buffer, random_vector, fs,
+ num_channels);
+}
+
+// TODO(turajs): This can be moved to BackgroundNoise class.
+void Expand::GenerateBackgroundNoise(int16_t* random_vector,
+ size_t channel,
+ int16_t mute_slope,
+ bool too_many_expands,
+ size_t num_noise_samples,
+ int16_t* buffer) {
+ static const int kNoiseLpcOrder = BackgroundNoise::kMaxLpcOrder;
+ int16_t scaled_random_vector[kMaxSampleRate / 8000 * 125];
+ assert(kMaxSampleRate / 8000 * 125 >= (int)num_noise_samples);
+ int16_t* noise_samples = &buffer[kNoiseLpcOrder];
+ if (background_noise_->initialized()) {
+ // Use background noise parameters.
+ memcpy(noise_samples - kNoiseLpcOrder,
+ background_noise_->FilterState(channel),
+ sizeof(int16_t) * kNoiseLpcOrder);
+
+ int dc_offset = 0;
+ if (background_noise_->ScaleShift(channel) > 1) {
+ dc_offset = 1 << (background_noise_->ScaleShift(channel) - 1);
+ }
+
+ // Scale random vector to correct energy level.
+ WebRtcSpl_AffineTransformVector(
+ scaled_random_vector, random_vector,
+ background_noise_->Scale(channel), dc_offset,
+ background_noise_->ScaleShift(channel),
+ static_cast<int>(num_noise_samples));
+
+ WebRtcSpl_FilterARFastQ12(scaled_random_vector, noise_samples,
+ background_noise_->Filter(channel),
+ kNoiseLpcOrder + 1,
+ static_cast<int>(num_noise_samples));
+
+ background_noise_->SetFilterState(
+ channel,
+ &(noise_samples[num_noise_samples - kNoiseLpcOrder]),
+ kNoiseLpcOrder);
+
+ // Unmute the background noise.
+ int16_t bgn_mute_factor = background_noise_->MuteFactor(channel);
+ NetEqBackgroundNoiseMode bgn_mode = background_noise_->mode();
+ if (bgn_mode == kBgnFade && too_many_expands && bgn_mute_factor > 0) {
+ // Fade BGN to zero.
+ // Calculate muting slope, approximately -2^18 / fs_hz.
+ int16_t mute_slope;
+ if (fs_hz_ == 8000) {
+ mute_slope = -32;
+ } else if (fs_hz_ == 16000) {
+ mute_slope = -16;
+ } else if (fs_hz_ == 32000) {
+ mute_slope = -8;
+ } else {
+ mute_slope = -5;
+ }
+ // Use UnmuteSignal function with negative slope.
+ // |bgn_mute_factor| is in Q14. |mute_slope| is in Q20.
+ DspHelper::UnmuteSignal(noise_samples,
+ num_noise_samples,
+ &bgn_mute_factor,
+ mute_slope,
+ noise_samples);
+ } else if (bgn_mute_factor < 16384) {
+ // If mode is kBgnOff, or if kBgnFade has started fading,
+ // Use regular |mute_slope|.
+ if (!stop_muting_ && bgn_mode != kBgnOff &&
+ !(bgn_mode == kBgnFade && too_many_expands)) {
+ DspHelper::UnmuteSignal(noise_samples,
+ static_cast<int>(num_noise_samples),
+ &bgn_mute_factor,
+ mute_slope,
+ noise_samples);
+ } else {
+ // kBgnOn and stop muting, or
+ // kBgnOff (mute factor is always 0), or
+ // kBgnFade has reached 0.
+ WebRtcSpl_AffineTransformVector(noise_samples, noise_samples,
+ bgn_mute_factor, 8192, 14,
+ static_cast<int>(num_noise_samples));
+ }
+ }
+ // Update mute_factor in BackgroundNoise class.
+ background_noise_->SetMuteFactor(channel, bgn_mute_factor);
+ } else {
+ // BGN parameters have not been initialized; use zero noise.
+ memset(noise_samples, 0, sizeof(int16_t) * num_noise_samples);
+ }
+}
+
+void Expand::GenerateRandomVector(int seed_increment,
+ size_t length,
+ int16_t* random_vector) {
+  // TODO(turajs): According to hlundin, the loop should not be needed. It
+  // should be just as good to generate all of the vector in one call.
+ size_t samples_generated = 0;
+ const size_t kMaxRandSamples = RandomVector::kRandomTableSize;
+ while(samples_generated < length) {
+ size_t rand_length = std::min(length - samples_generated, kMaxRandSamples);
+ random_vector_->IncreaseSeedIncrement(seed_increment);
+ random_vector_->Generate(rand_length, &random_vector[samples_generated]);
+ samples_generated += rand_length;
+ }
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h
index 25ae61903a6..1acf951b980 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand.h
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_EXPAND_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_EXPAND_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_EXPAND_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_EXPAND_H_
#include <assert.h>
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
@@ -36,12 +36,13 @@ class Expand {
RandomVector* random_vector,
int fs,
size_t num_channels)
- : background_noise_(background_noise),
+ : random_vector_(random_vector),
sync_buffer_(sync_buffer),
- random_vector_(random_vector),
first_expand_(true),
fs_hz_(fs),
num_channels_(num_channels),
+ consecutive_expands_(0),
+ background_noise_(background_noise),
overlap_length_(5 * fs / 8000),
lag_index_direction_(0),
current_lag_index_(0),
@@ -57,19 +58,19 @@ class Expand {
virtual ~Expand() {}
// Resets the object.
- void Reset();
+ virtual void Reset();
// The main method to produce concealment data. The data is appended to the
// end of |output|.
- int Process(AudioMultiVector* output);
+ virtual int Process(AudioMultiVector* output);
// Prepare the object to do extra expansion during normal operation following
// a period of expands.
- void SetParametersForNormalAfterExpand();
+ virtual void SetParametersForNormalAfterExpand();
// Prepare the object to do extra expansion during merge operation following
// a period of expands.
- void SetParametersForMergeAfterExpand();
+ virtual void SetParametersForMergeAfterExpand();
// Sets the mute factor for |channel| to |value|.
void SetMuteFactor(int16_t value, size_t channel) {
@@ -84,9 +85,38 @@ class Expand {
}
// Accessors and mutators.
- size_t overlap_length() const { return overlap_length_; }
+ virtual size_t overlap_length() const { return overlap_length_; }
int16_t max_lag() const { return max_lag_; }
+ protected:
+ static const int kMaxConsecutiveExpands = 200;
+ void GenerateRandomVector(int seed_increment,
+ size_t length,
+ int16_t* random_vector);
+
+ void GenerateBackgroundNoise(int16_t* random_vector,
+ size_t channel,
+ int16_t mute_slope,
+ bool too_many_expands,
+ size_t num_noise_samples,
+ int16_t* buffer);
+
+ // Initializes member variables at the beginning of an expand period.
+ void InitializeForAnExpandPeriod();
+
+ bool TooManyExpands();
+
+ // Analyzes the signal history in |sync_buffer_|, and set up all parameters
+ // necessary to produce concealment data.
+ void AnalyzeSignal(int16_t* random_vector);
+
+ RandomVector* random_vector_;
+ SyncBuffer* sync_buffer_;
+ bool first_expand_;
+ const int fs_hz_;
+ const size_t num_channels_;
+ int consecutive_expands_;
+
private:
static const int kUnvoicedLpcOrder = 6;
static const int kNumCorrelationCandidates = 3;
@@ -94,7 +124,6 @@ class Expand {
static const int kLpcAnalysisLength = 160;
static const int kMaxSampleRate = 48000;
static const int kNumLags = 3;
- static const int kMaxConsecutiveExpands = 200;
struct ChannelParameters {
// Constructor.
@@ -122,10 +151,6 @@ class Expand {
int16_t mute_slope; /* Q20 */
};
- // Analyze the signal history in |sync_buffer_|, and set up all parameters
- // necessary to produce concealment data.
- void AnalyzeSignal(int16_t* random_vector);
-
// Calculate the auto-correlation of |input|, with length |input_length|
// samples. The correlation is calculated from a downsampled version of
// |input|, and is written to |output|. The scale factor is written to
@@ -136,22 +161,27 @@ class Expand {
void UpdateLagIndex();
BackgroundNoise* background_noise_;
- SyncBuffer* sync_buffer_;
- RandomVector* random_vector_;
- bool first_expand_;
- int fs_hz_;
- size_t num_channels_;
- size_t overlap_length_;
- int consecutive_expands_;
+ const size_t overlap_length_;
int16_t max_lag_;
size_t expand_lags_[kNumLags];
int lag_index_direction_;
int current_lag_index_;
bool stop_muting_;
- scoped_array<ChannelParameters> channel_parameters_;
+ scoped_ptr<ChannelParameters[]> channel_parameters_;
DISALLOW_COPY_AND_ASSIGN(Expand);
};
+struct ExpandFactory {
+ ExpandFactory() {}
+ virtual ~ExpandFactory() {}
+
+ virtual Expand* Create(BackgroundNoise* background_noise,
+ SyncBuffer* sync_buffer,
+ RandomVector* random_vector,
+ int fs,
+ size_t num_channels) const;
+};
+
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_EXPAND_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_EXPAND_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc
index a63ed142f03..bd39f408ffa 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/expand_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/expand_unittest.cc
@@ -10,12 +10,12 @@
// Unit tests for Expand class.
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
namespace webrtc {
@@ -28,6 +28,19 @@ TEST(Expand, CreateAndDestroy) {
Expand expand(&bgn, &sync_buffer, &random_vector, fs, channels);
}
+TEST(Expand, CreateUsingFactory) {
+ int fs = 8000;
+ size_t channels = 1;
+ BackgroundNoise bgn(channels);
+ SyncBuffer sync_buffer(1, 1000);
+ RandomVector random_vector;
+ ExpandFactory expand_factory;
+ Expand* expand =
+ expand_factory.Create(&bgn, &sync_buffer, &random_vector, fs, channels);
+ EXPECT_TRUE(expand != NULL);
+ delete expand;
+}
+
// TODO(hlundin): Write more tests.
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/audio_decoder.h
index f3bcc711f36..9a2fb8b4645 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/audio_decoder.h
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_AUDIO_DECODER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_AUDIO_DECODER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_AUDIO_DECODER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_AUDIO_DECODER_H_
#include <stdlib.h> // NULL
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -108,6 +108,17 @@ class AudioDecoder {
// is available, or -1 in case of an error.
virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len);
+  // Returns the duration in samples of the redundant payload in |encoded| which
+ // is |encoded_len| bytes long. Returns kNotImplemented if no duration
+ // estimate is available, or -1 in case of an error.
+ virtual int PacketDurationRedundant(const uint8_t* encoded,
+ size_t encoded_len) const;
+
+ // Detects whether a packet has forward error correction. The packet is
+ // comprised of the samples in |encoded| which is |encoded_len| bytes long.
+ // Returns true if the packet has FEC and false otherwise.
+ virtual bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const;
+
virtual NetEqDecoder codec_type() const;
// Returns the underlying decoder state.
@@ -138,4 +149,4 @@ class AudioDecoder {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_AUDIO_DECODER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_AUDIO_DECODER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/neteq.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/neteq.h
index 617393093f3..c67ab12c6ce 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/interface/neteq.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/neteq.h
@@ -8,16 +8,16 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_NETEQ_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_NETEQ_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_NETEQ_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_NETEQ_H_
#include <string.h> // Provide access to size_t.
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_types.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -67,6 +67,20 @@ enum NetEqBackgroundNoiseMode {
// This is the interface class for NetEq.
class NetEq {
public:
+ struct Config {
+ Config()
+ : sample_rate_hz(16000),
+ enable_audio_classifier(false),
+ max_packets_in_buffer(50),
+ // |max_delay_ms| has the same effect as calling SetMaximumDelay().
+ max_delay_ms(2000) {}
+
+    int sample_rate_hz;  // Initial value. Will change with input data.
+ bool enable_audio_classifier;
+ int max_packets_in_buffer;
+ int max_delay_ms;
+ };
+
enum ReturnCodes {
kOK = 0,
kFail = -1,
@@ -98,17 +112,13 @@ class NetEq {
kFrameSplitError,
kRedundancySplitError,
kPacketBufferCorruption,
- kOversizePacket,
kSyncPacketNotAccepted
};
- static const int kMaxNumPacketsInBuffer = 240; // TODO(hlundin): Remove.
- static const int kMaxBytesInBuffer = 113280; // TODO(hlundin): Remove.
-
- // Creates a new NetEq object, starting at the sample rate |sample_rate_hz|.
- // (Note that it will still change the sample rate depending on what payloads
- // are being inserted; |sample_rate_hz| is just for startup configuration.)
- static NetEq* Create(int sample_rate_hz);
+ // Creates a new NetEq object, with parameters set in |config|. The |config|
+ // object will only have to be valid for the duration of the call to this
+ // method.
+ static NetEq* Create(const NetEq::Config& config);
virtual ~NetEq() {}
@@ -152,11 +162,10 @@ class NetEq {
// Provides an externally created decoder object |decoder| to insert in the
// decoder database. The decoder implements a decoder of type |codec| and
- // associates it with |rtp_payload_type|. The decoder operates at the
- // frequency |sample_rate_hz|. Returns kOK on success, kFail on failure.
+ // associates it with |rtp_payload_type|. Returns kOK on success,
+ // kFail on failure.
virtual int RegisterExternalDecoder(AudioDecoder* decoder,
enum NetEqDecoder codec,
- int sample_rate_hz,
uint8_t rtp_payload_type) = 0;
// Removes |rtp_payload_type| from the codec database. Returns 0 on success,
@@ -171,7 +180,8 @@ class NetEq {
// Sets a maximum delay in milliseconds for packet buffer. The latency will
// not exceed the given value, even required delay (given the channel
- // conditions) is higher.
+ // conditions) is higher. Calling this method has the same effect as setting
+ // the |max_delay_ms| value in the NetEq::Config struct.
virtual bool SetMaximumDelay(int delay_ms) = 0;
// The smallest latency required. This is computed bases on inter-arrival
@@ -218,8 +228,9 @@ class NetEq {
// Disables post-decode VAD.
virtual void DisableVad() = 0;
- // Returns the RTP timestamp for the last sample delivered by GetAudio().
- virtual uint32_t PlayoutTimestamp() = 0;
+ // Gets the RTP timestamp for the last sample delivered by GetAudio().
+ // Returns true if the RTP timestamp is valid, otherwise false.
+ virtual bool GetPlayoutTimestamp(uint32_t* timestamp) = 0;
// Not implemented.
virtual int SetTargetNumberOfChannels() = 0;
@@ -241,9 +252,7 @@ class NetEq {
// Current usage of packet-buffer and it's limits.
virtual void PacketBufferStatistics(int* current_num_packets,
- int* max_num_packets,
- int* current_memory_size_bytes,
- int* max_memory_size_bytes) const = 0;
+ int* max_num_packets) const = 0;
// Get sequence number and timestamp of the latest RTP.
// This method is to facilitate NACK.
@@ -264,4 +273,4 @@ class NetEq {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_INTERFACE_NETEQ_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_INTERFACE_NETEQ_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h
deleted file mode 100644
index c2a01340b93..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h
+++ /dev/null
@@ -1,230 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This is the main API for NetEQ. Helper macros are located in webrtc_neteq_help_macros.h,
- * while some internal API functions are found in webrtc_neteq_internal.h.
- */
-
-#include "typedefs.h"
-
-#ifndef WEBRTC_NETEQ_H
-#define WEBRTC_NETEQ_H
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-/**********************************************************
- * Definitions
- */
-
-enum WebRtcNetEQDecoder
-{
- kDecoderReservedStart,
- kDecoderPCMu,
- kDecoderPCMa,
- kDecoderPCMu_2ch,
- kDecoderPCMa_2ch,
- kDecoderILBC,
- kDecoderISAC,
- kDecoderISACswb,
- kDecoderISACfb,
- kDecoderPCM16B,
- kDecoderPCM16Bwb,
- kDecoderPCM16Bswb32kHz,
- kDecoderPCM16Bswb48kHz,
- kDecoderPCM16B_2ch,
- kDecoderPCM16Bwb_2ch,
- kDecoderPCM16Bswb32kHz_2ch,
- kDecoderG722,
- kDecoderG722_2ch,
- kDecoderRED,
- kDecoderAVT,
- kDecoderCNG,
- kDecoderArbitrary,
- kDecoderG729,
- kDecoderG729_1,
- kDecoderG726_16,
- kDecoderG726_24,
- kDecoderG726_32,
- kDecoderG726_40,
- kDecoderG722_1_16,
- kDecoderG722_1_24,
- kDecoderG722_1_32,
- kDecoderG722_1C_24,
- kDecoderG722_1C_32,
- kDecoderG722_1C_48,
- kDecoderOpus,
- kDecoderSPEEX_8,
- kDecoderSPEEX_16,
- kDecoderCELT_32,
- kDecoderCELT_32_2ch,
- kDecoderGSMFR,
- kDecoderAMR,
- kDecoderAMRWB,
- kDecoderReservedEnd
-};
-
-enum WebRtcNetEQNetworkType
-{
- kUDPNormal,
- kUDPVideoSync,
- kTCPNormal,
- kTCPLargeJitter,
- kTCPXLargeJitter
-};
-
-enum WebRtcNetEQOutputType
-{
- kOutputNormal,
- kOutputPLC,
- kOutputCNG,
- kOutputPLCtoCNG,
- kOutputVADPassive
-};
-
-enum WebRtcNetEQPlayoutMode
-{
- kPlayoutOn, kPlayoutOff, kPlayoutFax, kPlayoutStreaming
-};
-
-/* Available modes for background noise (inserted after long expands) */
-enum WebRtcNetEQBGNMode
-{
- kBGNOn, /* default "normal" behavior with eternal noise */
- kBGNFade, /* noise fades to zero after some time */
- kBGNOff
-/* background noise is always zero */
-};
-
-/*************************************************
- * Definitions of decoder calls and the default
- * API function calls for each codec
- */
-
-typedef int16_t (*WebRtcNetEQ_FuncDecode)(void* state, int16_t* encoded,
- int16_t len, int16_t* decoded,
- int16_t* speechType);
-typedef int16_t (*WebRtcNetEQ_FuncDecodePLC)(void* state, int16_t* decoded,
- int16_t frames);
-typedef int16_t (*WebRtcNetEQ_FuncDecodeInit)(void* state);
-typedef int16_t (*WebRtcNetEQ_FuncAddLatePkt)(void* state, int16_t* encoded,
- int16_t len);
-typedef int16_t (*WebRtcNetEQ_FuncGetMDinfo)(void* state);
-typedef int16_t (*WebRtcNetEQ_FuncGetPitchInfo)(void* state, int16_t* encoded,
- int16_t* length);
-typedef int16_t (*WebRtcNetEQ_FuncUpdBWEst)(void* state, const uint16_t *encoded,
- int32_t packet_size,
- uint16_t rtp_seq_number,
- uint32_t send_ts,
- uint32_t arr_ts);
-typedef int (*WebRtcNetEQ_FuncDurationEst)(void* state, const uint8_t* payload,
- int payload_length_bytes);
-typedef int16_t (*WebRtcNetEQ_FuncGetErrorCode)(void* state);
-
-/**********************************************************
- * Structures
- */
-
-typedef struct
-{
- enum WebRtcNetEQDecoder codec;
- int16_t payloadType;
- WebRtcNetEQ_FuncDecode funcDecode;
- WebRtcNetEQ_FuncDecode funcDecodeRCU;
- WebRtcNetEQ_FuncDecodePLC funcDecodePLC;
- WebRtcNetEQ_FuncDecodeInit funcDecodeInit;
- WebRtcNetEQ_FuncAddLatePkt funcAddLatePkt;
- WebRtcNetEQ_FuncGetMDinfo funcGetMDinfo;
- WebRtcNetEQ_FuncGetPitchInfo funcGetPitch;
- WebRtcNetEQ_FuncUpdBWEst funcUpdBWEst;
- WebRtcNetEQ_FuncDurationEst funcDurationEst;
- WebRtcNetEQ_FuncGetErrorCode funcGetErrorCode;
- void* codec_state;
- uint16_t codec_fs;
-} WebRtcNetEQ_CodecDef;
-
-typedef struct
-{
- uint16_t fraction_lost;
- uint32_t cum_lost;
- uint32_t ext_max;
- uint32_t jitter;
-} WebRtcNetEQ_RTCPStat;
-
-/**********************************************************
- * NETEQ Functions
- */
-
-/* Info functions */
-
-#define WEBRTC_NETEQ_MAX_ERROR_NAME 40
-int WebRtcNetEQ_GetErrorCode(void *inst);
-int WebRtcNetEQ_GetErrorName(int errorCode, char *errorName, int maxStrLen);
-
-/* Instance memory assign functions */
-
-int WebRtcNetEQ_AssignSize(int *sizeinbytes);
-int WebRtcNetEQ_Assign(void **inst, void *NETEQ_inst_Addr);
-int WebRtcNetEQ_GetRecommendedBufferSize(void *inst, const enum WebRtcNetEQDecoder *codec,
- int noOfCodecs, enum WebRtcNetEQNetworkType nwType,
- int *MaxNoOfPackets, int *sizeinbytes,
- int* per_packet_overhead_bytes);
-int WebRtcNetEQ_AssignBuffer(void *inst, int MaxNoOfPackets, void *NETEQ_Buffer_Addr,
- int sizeinbytes);
-
-/* Init functions */
-
-int WebRtcNetEQ_Init(void *inst, uint16_t fs);
-int WebRtcNetEQ_SetAVTPlayout(void *inst, int PlayoutAVTon);
-int WebRtcNetEQ_SetExtraDelay(void *inst, int DelayInMs);
-int WebRtcNetEQ_SetPlayoutMode(void *inst, enum WebRtcNetEQPlayoutMode playoutMode);
-int WebRtcNetEQ_SetBGNMode(void *inst, enum WebRtcNetEQBGNMode bgnMode);
-int WebRtcNetEQ_GetBGNMode(const void *inst, enum WebRtcNetEQBGNMode *bgnMode);
-
-/* Codec Database functions */
-
-int WebRtcNetEQ_CodecDbReset(void *inst);
-int WebRtcNetEQ_CodecDbAdd(void *inst, WebRtcNetEQ_CodecDef *codecInst);
-int WebRtcNetEQ_CodecDbRemove(void *inst, enum WebRtcNetEQDecoder codec);
-int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, int16_t *UsedEntries,
- int16_t *MaxEntries);
-int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, int16_t Entry,
- enum WebRtcNetEQDecoder *codec);
-
-/* Real-time functions */
-
-int WebRtcNetEQ_RecIn(void *inst, int16_t *p_w16datagramstart, int16_t w16_RTPlen,
- uint32_t uw32_timeRec);
-int WebRtcNetEQ_RecOut(void *inst, int16_t *pw16_outData, int16_t *pw16_len);
-int WebRtcNetEQ_GetRTCPStats(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
-int WebRtcNetEQ_GetRTCPStatsNoReset(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
-int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, uint32_t *timestamp);
-int WebRtcNetEQ_DecodedRtpInfo(const void* inst,
- int* sequence_number,
- uint32_t* timestamp);
-int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outputType);
-
-/* VQmon related functions */
-int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, uint16_t *validVoiceDurationMs,
- uint16_t *concealedVoiceDurationMs,
- uint8_t *concealedVoiceFlags);
-int WebRtcNetEQ_VQmonGetConfiguration(void *inst, uint16_t *absMaxDelayMs,
- uint8_t *adaptationRate);
-int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, uint16_t *avgDelayMs,
- uint16_t *maxDelayMs);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h
deleted file mode 100644
index bd93328108e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h
+++ /dev/null
@@ -1,454 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains some helper macros that can be used when loading the
- * NetEQ codec database.
- */
-
-#ifndef WEBRTC_NETEQ_HELP_MACROS_H
-#define WEBRTC_NETEQ_HELP_MACROS_H
-
-#ifndef NULL
-#define NULL 0
-#endif
-
-/**********************************************************
- * Help macros for NetEQ initialization
- */
-
-#define SET_CODEC_PAR(inst,decoder,pt,state,fs) \
- inst.codec=decoder; \
- inst.payloadType=pt; \
- inst.codec_state=state; \
- inst.codec_fs=fs;
-
-#define SET_PCMU_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG711_DecodeU; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=WebRtcG711_DurationEst; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_PCMA_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG711_DecodeA; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=WebRtcG711_DurationEst; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_ILBC_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIlbcfix_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcIlbcfix_NetEqPlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIlbcfix_Decoderinit30Ms; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_ISAC_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
- inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
-
-#define SET_ISACfix_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsacfix_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsacfix_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsacfix_UpdateBwEstimate; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsacfix_GetErrorCode;
-
-#define SET_ISACSWB_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
- inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
-
-#define SET_ISACFB_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
- inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
-
-#define SET_G729_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG729_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG729_DecodePlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG729_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G729_1_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7291_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7291_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcG7291_DecodeBwe; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_PCM16B_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_PCM16B_WB_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_PCM16B_SWB32_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_PCM16B_SWB48_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG722_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG722_DecoderInit;\
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1_16_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc16; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit16; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1_24_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode24; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc24; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit24; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1_32_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode32; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc32; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit32; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1C_24_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode24; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc24; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit24; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1C_32_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode32; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc32; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit32; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G722_1C_48_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode48; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc48; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit48; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_AMR_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcAmr_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcAmr_DecodePlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcAmr_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_AMRWB_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcAmrWb_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcAmrWb_DecodePlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcAmrWb_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_GSMFR_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcGSMFR_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcGSMFR_DecodePlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcGSMFR_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G726_16_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode16; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit16; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G726_24_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode24; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit24; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G726_32_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode32; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit32; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_G726_40_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode40; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit40; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_OPUS_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcOpus_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcOpus_DecodePlcMaster; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcOpus_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=(WebRtcNetEQ_FuncDurationEst)WebRtcOpus_DurationEst; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_OPUSSLAVE_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcOpus_DecodeSlave; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcOpus_DecodePlcSlave; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcOpus_DecoderInitSlave; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=(WebRtcNetEQ_FuncDurationEst)WebRtcOpus_DurationEst; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_SPEEX_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcSpeex_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcSpeex_DecodePlc; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcSpeex_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_CELT_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcCelt_Decode; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcCelt_DecoderInit; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_CELTSLAVE_FUNCTIONS(inst) \
- inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcCelt_DecodeSlave; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcCelt_DecoderInitSlave; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_RED_FUNCTIONS(inst) \
- inst.funcDecode=NULL; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_AVT_FUNCTIONS(inst) \
- inst.funcDecode=NULL; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#define SET_CNG_FUNCTIONS(inst) \
- inst.funcDecode=NULL; \
- inst.funcDecodeRCU=NULL; \
- inst.funcDecodePLC=NULL; \
- inst.funcDecodeInit=NULL; \
- inst.funcAddLatePkt=NULL; \
- inst.funcGetMDinfo=NULL; \
- inst.funcGetPitch=NULL; \
- inst.funcUpdBWEst=NULL; \
- inst.funcDurationEst=NULL; \
- inst.funcGetErrorCode=NULL;
-
-#endif /* WEBRTC_NETEQ_HELP_MACROS_H */
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h
deleted file mode 100644
index c46a3f62705..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h
+++ /dev/null
@@ -1,336 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the internal API functions.
- */
-
-#include "typedefs.h"
-
-#ifndef WEBRTC_NETEQ_INTERNAL_H
-#define WEBRTC_NETEQ_INTERNAL_H
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-typedef struct
-{
- uint8_t payloadType;
- uint16_t sequenceNumber;
- uint32_t timeStamp;
- uint32_t SSRC;
- uint8_t markerBit;
-} WebRtcNetEQ_RTPInfo;
-
-/****************************************************************************
- * WebRtcNetEQ_RecInRTPStruct(...)
- *
- * Alternative RecIn function, used when the RTP data has already been
- * parsed into an RTP info struct (WebRtcNetEQ_RTPInfo).
- *
- * Input:
- * - inst : NetEQ instance
- * - rtpInfo : Pointer to RTP info
- * - payloadPtr : Pointer to the RTP payload (first byte after header)
- * - payloadLenBytes : Length (in bytes) of the payload in payloadPtr
- * - timeRec : Receive time (in timestamps of the used codec)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcNetEQ_RecInRTPStruct(void *inst, WebRtcNetEQ_RTPInfo *rtpInfo,
- const uint8_t *payloadPtr, int16_t payloadLenBytes,
- uint32_t timeRec);
-
-/****************************************************************************
- * WebRtcNetEQ_GetMasterSlaveInfoSize(...)
- *
- * Get size in bytes for master/slave struct msInfo used in
- * WebRtcNetEQ_RecOutMasterSlave.
- *
- * Return value : Struct size in bytes
- *
- */
-
-int WebRtcNetEQ_GetMasterSlaveInfoSize();
-
-/****************************************************************************
- * WebRtcNetEQ_RecOutMasterSlave(...)
- *
- * RecOut function for running several NetEQ instances in master/slave mode.
- * One master can be used to control several slaves.
- * The MasterSlaveInfo struct must be allocated outside NetEQ.
- * Use function WebRtcNetEQ_GetMasterSlaveInfoSize to get the size needed.
- *
- * Input:
- * - inst : NetEQ instance
- * - isMaster : Non-zero indicates that this is the master channel
- * - msInfo : (slave only) Information from master
- *
- * Output:
- * - inst : Updated NetEQ instance
- * - pw16_outData : Pointer to vector where output should be written
- * - pw16_len : Pointer to variable where output length is returned
- * - msInfo : (master only) Information to slave(s)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RecOutMasterSlave(void *inst, int16_t *pw16_outData,
- int16_t *pw16_len, void *msInfo,
- int16_t isMaster);
-
-typedef struct
-{
- uint16_t currentBufferSize; /* Current jitter buffer size in ms. */
- uint16_t preferredBufferSize; /* Preferred buffer size in ms. */
- uint16_t jitterPeaksFound; /* 1 if adding extra delay due to peaky
- * jitter; 0 otherwise. */
- uint16_t currentPacketLossRate; /* Loss rate (network + late) (Q14). */
- uint16_t currentDiscardRate; /* Late loss rate (Q14). */
- uint16_t currentExpandRate; /* Fraction (of original stream) of
- * synthesized speech inserted through
- * expansion (in Q14). */
- uint16_t currentPreemptiveRate; /* Fraction of data inserted through
- * pre-emptive expansion (in Q14). */
- uint16_t currentAccelerateRate; /* Fraction of data removed through
- * acceleration (in Q14). */
- int32_t clockDriftPPM; /* Average clock-drift in parts-per-
- * million (positive or negative). */
- int addedSamples; /* Number of zero samples added in off
- * mode */
-} WebRtcNetEQ_NetworkStatistics;
-
-/*
- * Get the "in-call" statistics from NetEQ.
- * The statistics are reset after the query.
- */
-int WebRtcNetEQ_GetNetworkStatistics(void *inst, WebRtcNetEQ_NetworkStatistics *stats);
-
-
-typedef struct {
- /* Samples removed from background noise only segments. */
- int accelerate_bgn_samples;
-
- /* Samples removed from normal audio segments. */
- int accelerate_normal_samples;
-
- /* Number of samples synthesized during background noise only segments. */
- int expand_bgn_sampels;
-
- /* Number of samples synthesized during normal audio segments. */
- int expand_normal_samples;
-
- /* Number of samples synthesized during background noise only segments,
- * in preemptive mode. */
- int preemptive_expand_bgn_samples;
-
- /* Number of samples synthesized during normal audio segments, in preemptive
- * mode. */
- int preemptive_expand_normal_samples;
-
- /* Number of samples synthesized during background noise only segments,
- * while merging. */
- int merge_expand_bgn_samples;
-
- /* Number of samples synthesized during normal audio segments, while
- * merging. */
- int merge_expand_normal_samples;
-} WebRtcNetEQ_ProcessingActivity;
-
-/*
- * Get the processing activities from NetEQ.
- * The statistics are reset after the query.
- * This API is meant to obtain processing activities in high granularity,
- * e.g. per RecOut() call.
- */
-void WebRtcNetEQ_GetProcessingActivity(void* inst,
- WebRtcNetEQ_ProcessingActivity* stat);
-
-/*
- * Get the raw waiting times for decoded frames. The function writes the last
- * recorded waiting times (from frame arrival to frame decoding) to the memory
- * pointed to by waitingTimeMs. The number of elements written is in the return
- * value. No more than maxLength elements are written. Statistics are reset on
- * each query.
- */
-int WebRtcNetEQ_GetRawFrameWaitingTimes(void *inst,
- int max_length,
- int* waiting_times_ms);
-
-/***********************************************/
-/* Functions for post-decode VAD functionality */
-/***********************************************/
-
-/* NetEQ must be compiled with the flag NETEQ_VAD enabled for these functions to work. */
-
-/*
- * VAD function pointer types
- *
- * These function pointers match the definitions of webrtc VAD functions WebRtcVad_Init,
- * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in webrtc_vad.h.
- */
-typedef int (*WebRtcNetEQ_VADInitFunction)(void *VAD_inst);
-typedef int (*WebRtcNetEQ_VADSetmodeFunction)(void *VAD_inst, int mode);
-typedef int (*WebRtcNetEQ_VADFunction)(void *VAD_inst, int fs,
- int16_t *frame, int frameLen);
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADInstance(...)
- *
- * Provide a pointer to an allocated VAD instance. If function is never
- * called or it is called with NULL pointer as VAD_inst, the post-decode
- * VAD functionality is disabled. Also provide pointers to init, setmode
- * and VAD functions. These are typically pointers to WebRtcVad_Init,
- * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in the
- * interface file webrtc_vad.h.
- *
- * Input:
- * - NetEQ_inst : NetEQ instance
- * - VADinst : VAD instance
- * - initFunction : Pointer to VAD init function
- * - setmodeFunction : Pointer to VAD setmode function
- * - VADfunction : Pointer to VAD function
- *
- * Output:
- * - NetEQ_inst : Updated NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADInstance(void *NetEQ_inst, void *VAD_inst,
- WebRtcNetEQ_VADInitFunction initFunction,
- WebRtcNetEQ_VADSetmodeFunction setmodeFunction,
- WebRtcNetEQ_VADFunction VADFunction);
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADMode(...)
- *
- * Pass an aggressiveness mode parameter to the post-decode VAD instance.
- * If this function is never called, mode 0 (quality mode) is used as default.
- *
- * Input:
- * - inst : NetEQ instance
- * - mode : mode parameter (same range as WebRtc VAD mode)
- *
- * Output:
- * - inst : Updated NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADMode(void *NetEQ_inst, int mode);
-
-/****************************************************************************
- * WebRtcNetEQ_RecOutNoDecode(...)
- *
- * Special RecOut that does not do any decoding.
- *
- * Input:
- * - inst : NetEQ instance
- *
- * Output:
- * - inst : Updated NetEQ instance
- * - pw16_outData : Pointer to vector where output should be written
- * - pw16_len : Pointer to variable where output length is returned
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RecOutNoDecode(void *inst, int16_t *pw16_outData,
- int16_t *pw16_len);
-
-/****************************************************************************
- * WebRtcNetEQ_FlushBuffers(...)
- *
- * Flush packet and speech buffers. Does not reset codec database or
- * jitter statistics.
- *
- * Input:
- * - inst : NetEQ instance
- *
- * Output:
- * - inst : Updated NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_FlushBuffers(void *inst);
-
-/*****************************************************************************
- * void WebRtcNetEq_EnableAVSync(...)
- *
- * Enable AV-sync. If Enabled, NetEq will screen for sync payloads. For
- * each sync payload a silence frame is generated.
- *
- * Input:
- * - inst : NetEQ instance
- * - enable : non-zero to enable, otherwise disabled.
- *
- * Output:
- * - inst : Updated NetEQ instance
- *
- */
-
-void WebRtcNetEQ_EnableAVSync(void* inst, int enable);
-
-/****************************************************************************
- * WebRtcNetEQ_RecInSyncRTP(...)
- *
- * Insert a sync packet with the given RTP specification.
- *
- * Input:
- * - inst : NetEQ instance
- * - rtpInfo : Pointer to RTP info
- * - receive_timestamp : Receive time (in timestamps of the used codec)
- *
- * Output:
- * - inst : Updated NetEQ instance
- *
- * Return value : if succeeded it returns the number of bytes pushed
- * in, otherwise returns -1.
- */
-
-int WebRtcNetEQ_RecInSyncRTP(void* inst,
- WebRtcNetEQ_RTPInfo* rtp_info,
- uint32_t receive_timestamp);
-
-/*
- * Set a minimum latency for the jitter buffer. The overall delay is the max of
- * |minimum_delay_ms| and the latency that is internally computed based on the
- * inter-arrival times.
- */
-int WebRtcNetEQ_SetMinimumDelay(void *inst, int minimum_delay_ms);
-
-/*
- * Set a maximum latency for the jitter buffer. The overall delay is the min of
- * |maximum_delay_ms| and the latency that is internally computed based on the
- * inter-arrival times.
- */
-int WebRtcNetEQ_SetMaximumDelay(void *inst, int maximum_delay_ms);
-
-/*
- * Get the least required delay in milliseconds given inter-arrival times
- * and playout mode.
- */
-int WebRtcNetEQ_GetRequiredDelayMs(const void* inst);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu.h
deleted file mode 100644
index 931e6dcf561..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu.h
+++ /dev/null
@@ -1,300 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * MCU struct and functions related to the MCU side operations.
- */
-
-#ifndef MCU_H
-#define MCU_H
-
-#include "typedefs.h"
-
-#include "codec_db.h"
-#include "rtcp.h"
-#include "packet_buffer.h"
-#include "buffer_stats.h"
-#include "neteq_statistics.h"
-
-#ifdef NETEQ_ATEVENT_DECODE
-#include "dtmf_buffer.h"
-#endif
-
-#define MAX_ONE_DESC 5 /* cannot do more than this many consecutive one-descriptor decodings */
-#define MAX_LOSS_REPORT_PERIOD 60 /* number of seconds between auto-reset */
-
-enum TsScaling
-{
- kTSnoScaling = 0,
- kTSscalingTwo,
- kTSscalingTwoThirds,
- kTSscalingFourThirds
-};
-
-enum { kLenWaitingTimes = 100 };
-
-typedef struct
-{
-
- int16_t current_Codec;
- int16_t current_Payload;
- uint32_t timeStamp; /* Next timestamp that should be played */
- int16_t millisecondsPerCall;
- uint16_t timestampsPerCall; /* Output chunk size */
- uint16_t fs;
- uint32_t ssrc; /* Current ssrc */
- int16_t new_codec;
- int16_t first_packet;
-
- /* MCU/DSP Communication layer */
- int16_t *pw16_readAddress;
- int16_t *pw16_writeAddress;
- void *main_inst;
-
- CodecDbInst_t codec_DB_inst; /* Information about all the codecs, i.e. which
- functions to use and which codpoints that
- have been assigned */
- SplitInfo_t PayloadSplit_inst; /* Information about how the current codec
- payload should be splitted */
- WebRtcNetEQ_RTCP_t RTCP_inst; /* RTCP statistics */
- PacketBuf_t PacketBuffer_inst; /* The packet buffer */
- BufstatsInst_t BufferStat_inst; /* Statistics that are used to make decision
- for what the DSP should perform */
-#ifdef NETEQ_ATEVENT_DECODE
- dtmf_inst_t DTMF_inst;
-#endif
- int NoOfExpandCalls;
- int16_t AVT_PlayoutOn;
- enum WebRtcNetEQPlayoutMode NetEqPlayoutMode;
-
- int16_t one_desc; /* Number of times running on one desc */
-
- uint32_t lostTS; /* Number of timestamps lost */
- uint32_t lastReportTS; /* Timestamp elapsed since last report was given */
-
- int waiting_times[kLenWaitingTimes]; /* Waiting time statistics storage. */
- int len_waiting_times;
- int next_waiting_time_index;
-
- uint32_t externalTS;
- uint32_t internalTS;
- int16_t TSscalingInitialized;
- enum TsScaling scalingFactor;
-
- /* AV-sync enabled. In AV-sync NetEq screens packets for specific sync
- * packets. Sync packets are not decoded by a decoder but generate all-zero
- * signal with the same number of samples as previously decoded payload.
- * Also in AV-sync mode the sample-size of a sync payload is reported as
- * previous frame-size. */
- int av_sync;
-
-#ifdef NETEQ_STEREO
- int usingStereo;
-#endif
-
- /* The sequence number of the latest decoded RTP payload. */
- int decoded_packet_sequence_number;
- uint32_t decoded_packet_timestamp;
-} MCUInst_t;
-
-/****************************************************************************
- * WebRtcNetEQ_McuReset(...)
- *
- * Reset the MCU instance.
- *
- * Input:
- * - inst : MCU instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_McuReset(MCUInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_ResetMcuInCallStats(...)
- *
- * Reset MCU-side statistics variables for the in-call statistics.
- *
- * Input:
- * - inst : MCU instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_ResetMcuInCallStats(MCUInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_ResetWaitingTimeStats(...)
- *
- * Reset waiting-time statistics.
- *
- * Input:
- * - inst : MCU instance.
- *
- * Return value : n/a
- */
-void WebRtcNetEQ_ResetWaitingTimeStats(MCUInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_LogWaitingTime(...)
- *
- * Log waiting-time to the statistics.
- *
- * Input:
- * - inst : MCU instance.
- * - waiting_time : Waiting time in "RecOut calls" (i.e., 1 call = 10 ms).
- *
- * Return value : n/a
- */
-void WebRtcNetEQ_StoreWaitingTime(MCUInst_t *inst, int waiting_time);
-
-/****************************************************************************
- * WebRtcNetEQ_ResetMcuJitterStat(...)
- *
- * Reset MCU-side statistics variables for the post-call statistics.
- *
- * Input:
- * - inst : MCU instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_ResetMcuJitterStat(MCUInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_McuAddressInit(...)
- *
- * Initializes MCU with read address and write address.
- *
- * Input:
- * - inst : MCU instance
- * - Data2McuAddress : Pointer to MCU address
- * - Data2DspAddress : Pointer to DSP address
- * - main_inst : Pointer to NetEQ main instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_McuAddressInit(MCUInst_t *inst, void * Data2McuAddress,
- void * Data2DspAddress, void *main_inst);
-
-/****************************************************************************
- * WebRtcNetEQ_McuSetFs(...)
- *
- * Initializes MCU with read address and write address.
- *
- * Input:
- * - inst : MCU instance
- * - fs_hz : Sample rate in Hz -- 8000, 16000, 32000, (48000)
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, uint16_t fs_hz);
-
-/****************************************************************************
- * WebRtcNetEQ_SignalMcu(...)
- *
- * Signal the MCU that data is available and ask for a RecOut decision.
- *
- * Input:
- * - inst : MCU instance
- * - av_sync : 1 if NetEQ is in AV-sync mode, otherwise 0.
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-int WebRtcNetEQ_SignalMcu(MCUInst_t *inst);
-
-/****************************************************************************
- * WebRtcNetEQ_RecInInternal(...)
- *
- * This function inserts a packet into the jitter buffer.
- *
- * Input:
- * - MCU_inst : MCU instance
- * - RTPpacket : The RTP packet, parsed into NetEQ's internal RTP struct
- * - uw32_timeRec : Time stamp for the arrival of the packet (not RTP timestamp)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacket,
- uint32_t uw32_timeRec);
-
-/****************************************************************************
- * WebRtcNetEQ_RecInInternal(...)
- *
- * Split the packet according to split_inst and inserts the parts into
- * Buffer_inst.
- *
- * Input:
- * - MCU_inst : MCU instance
- * - RTPpacket : The RTP packet, parsed into NetEQ's internal RTP struct
- * - uw32_timeRec : Time stamp for the arrival of the packet (not RTP timestamp)
- * - av_sync : indicates if AV-sync is enabled, 1 enabled,
- * 0 disabled.
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t* packet,
- PacketBuf_t* Buffer_inst,
- SplitInfo_t* split_inst,
- int16_t* flushed,
- int av_sync);
-
-/****************************************************************************
- * WebRtcNetEQ_GetTimestampScaling(...)
- *
- * Update information about timestamp scaling for a payload type
- * in MCU_inst->scalingFactor.
- *
- * Input:
- * - MCU_inst : MCU instance
- * - rtpPayloadType : RTP payload number
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_GetTimestampScaling(MCUInst_t *MCU_inst, int rtpPayloadType);
-
-/****************************************************************************
- * WebRtcNetEQ_ScaleTimestampExternalToInternal(...)
- *
- * Convert from external to internal timestamp using current scaling info.
- *
- * Input:
- * - MCU_inst : MCU instance
- * - externalTS : External timestamp
- *
- * Return value : Internal timestamp
- */
-
-uint32_t WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
- uint32_t externalTS);
-
-/****************************************************************************
- * WebRtcNetEQ_ScaleTimestampInternalToExternal(...)
- *
- * Convert from external to internal timestamp using current scaling info.
- *
- * Input:
- * - MCU_inst : MCU instance
- * - externalTS : Internal timestamp
- *
- * Return value : External timestamp
- */
-
-uint32_t WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
- uint32_t internalTS);
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_address_init.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_address_init.c
deleted file mode 100644
index 666ecc85612..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_address_init.c
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "mcu.h"
-
-#include <string.h> /* to define NULL */
-
-/*
- * Initializes MCU with read address and write address
- */
-int WebRtcNetEQ_McuAddressInit(MCUInst_t *inst, void * Data2McuAddress,
- void * Data2DspAddress, void *main_inst)
-{
-
- inst->pw16_readAddress = (int16_t*) Data2McuAddress;
- inst->pw16_writeAddress = (int16_t*) Data2DspAddress;
- inst->main_inst = main_inst;
-
- inst->millisecondsPerCall = 10;
-
- /* Do expansions in the beginning */
- if (inst->pw16_writeAddress != NULL) inst->pw16_writeAddress[0] = DSP_INSTR_EXPAND;
-
- return (0);
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c
deleted file mode 100644
index 2c48ec7dde2..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Communication between MCU and DSP sides.
- */
-
-#include "mcu_dsp_common.h"
-
-#include <string.h>
-
-/* Initialize instances with read and write address */
-int WebRtcNetEQ_DSPinit(MainInst_t *inst)
-{
- int res = 0;
-
- res |= WebRtcNetEQ_AddressInit(&inst->DSPinst, NULL, NULL, inst);
- res |= WebRtcNetEQ_McuAddressInit(&inst->MCUinst, NULL, NULL, inst);
-
- return res;
-
-}
-
-/* The DSP side will call this function to interrupt the MCU side */
-int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, int16_t *pw16_shared_mem)
-{
- inst->MCUinst.pw16_readAddress = pw16_shared_mem;
- inst->MCUinst.pw16_writeAddress = pw16_shared_mem;
- return WebRtcNetEQ_SignalMcu(&inst->MCUinst);
-}
-
-int WebRtcNetEQ_IsSyncPayload(const void* payload, int payload_len_bytes) {
- if (payload_len_bytes != SYNC_PAYLOAD_LEN_BYTES ||
- memcmp(payload, kSyncPayload, SYNC_PAYLOAD_LEN_BYTES) != 0) {
- return 0;
- }
- return 1;
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h
deleted file mode 100644
index b4ab514bc95..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * The main NetEQ instance, which is where the DSP and MCU sides join.
- */
-
-#ifndef MCU_DSP_COMMON_H
-#define MCU_DSP_COMMON_H
-
-#include "typedefs.h"
-
-#include "dsp.h"
-#include "mcu.h"
-
-/* Define size of shared memory area. */
-#if defined(NETEQ_48KHZ_WIDEBAND)
- #define SHARED_MEM_SIZE (6*640)
-#elif defined(NETEQ_32KHZ_WIDEBAND)
- #define SHARED_MEM_SIZE (4*640)
-#elif defined(NETEQ_WIDEBAND)
- #define SHARED_MEM_SIZE (2*640)
-#else
- #define SHARED_MEM_SIZE 640
-#endif
-
-#define SYNC_PAYLOAD_LEN_BYTES 7
-static const uint8_t kSyncPayload[SYNC_PAYLOAD_LEN_BYTES] = {
- 'a', 'v', 's', 'y', 'n', 'c', '\0' };
-
-/* Struct to hold the NetEQ instance */
-typedef struct
-{
- DSPInst_t DSPinst; /* DSP part of the NetEQ instance */
- MCUInst_t MCUinst; /* MCU part of the NetEQ instance */
- int16_t ErrorCode; /* Store last error code */
-#ifdef NETEQ_STEREO
- int16_t masterSlave; /* 0 = not set, 1 = master, 2 = slave */
-#endif /* NETEQ_STEREO */
-} MainInst_t;
-
-/* Struct used for communication between DSP and MCU sides of NetEQ */
-typedef struct
-{
- uint32_t playedOutTS; /* Timestamp position at end of DSP data */
- uint16_t samplesLeft; /* Number of samples stored */
- int16_t MD; /* Multiple description codec information */
- int16_t lastMode; /* Latest mode of NetEQ playout */
- int16_t frameLen; /* Frame length of previously decoded packet */
-} DSP2MCU_info_t;
-
-/* Initialize instances with read and write address */
-int WebRtcNetEQ_DSPinit(MainInst_t *inst);
-
-/* The DSP side will call this function to interrupt the MCU side */
-int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, int16_t *pw16_shared_mem);
-
-/* Returns 1 if the given payload matches |kSyncPayload| payload, otherwise
- * 0 is returned. */
-int WebRtcNetEQ_IsSyncPayload(const void* payload, int payload_len_bytes);
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_reset.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_reset.c
deleted file mode 100644
index ddbb798af87..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mcu_reset.c
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Reset MCU side data.
- */
-
-#include "mcu.h"
-
-#include <assert.h>
-#include <string.h>
-
-#include "automode.h"
-
-int WebRtcNetEQ_McuReset(MCUInst_t *inst)
-{
-
-#ifdef NETEQ_ATEVENT_DECODE
- int ok;
-#endif
-
- /* MCU/DSP Communication layer */
- inst->pw16_readAddress = NULL;
- inst->pw16_writeAddress = NULL;
- inst->main_inst = NULL;
- inst->one_desc = 0;
- inst->BufferStat_inst.Automode_inst.extraDelayMs = 0;
- inst->BufferStat_inst.Automode_inst.minimum_delay_ms = 0;
- inst->BufferStat_inst.Automode_inst.maximum_delay_ms = 10000;
- inst->NetEqPlayoutMode = kPlayoutOn;
- inst->av_sync = 0;
-
- WebRtcNetEQ_DbReset(&inst->codec_DB_inst);
- memset(&inst->PayloadSplit_inst, 0, sizeof(SplitInfo_t));
-
- /* Clear the Packet buffer and the pointer to memory storage */
- WebRtcNetEQ_PacketBufferFlush(&inst->PacketBuffer_inst);
- inst->PacketBuffer_inst.memorySizeW16 = 0;
- inst->PacketBuffer_inst.maxInsertPositions = 0;
-
- /* Clear the decision and delay history */
- memset(&inst->BufferStat_inst, 0, sizeof(BufstatsInst_t));
-#ifdef NETEQ_ATEVENT_DECODE
- ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 8000, 560);
- if (ok != 0)
- {
- return ok;
- }
-#endif
- inst->NoOfExpandCalls = 0;
- inst->current_Codec = -1;
- inst->current_Payload = -1;
-
- inst->millisecondsPerCall = 10;
- inst->timestampsPerCall = inst->millisecondsPerCall * 8;
- inst->fs = 8000;
- inst->first_packet = 1;
-
- WebRtcNetEQ_ResetMcuInCallStats(inst);
-
- WebRtcNetEQ_ResetWaitingTimeStats(inst);
-
- WebRtcNetEQ_ResetMcuJitterStat(inst);
-
- WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
- inst->PacketBuffer_inst.maxInsertPositions);
-
- return 0;
-}
-
-/*
- * Reset MCU-side statistics variables for the in-call statistics.
- */
-
-int WebRtcNetEQ_ResetMcuInCallStats(MCUInst_t *inst)
-{
- inst->lostTS = 0;
- inst->lastReportTS = 0;
- inst->PacketBuffer_inst.discardedPackets = 0;
-
- return 0;
-}
-
-/*
- * Reset waiting-time statistics.
- */
-
-void WebRtcNetEQ_ResetWaitingTimeStats(MCUInst_t *inst) {
- memset(inst->waiting_times, 0,
- kLenWaitingTimes * sizeof(inst->waiting_times[0]));
- inst->len_waiting_times = 0;
- inst->next_waiting_time_index = 0;
-}
-
-/*
- * Store waiting-time in the statistics.
- */
-
-void WebRtcNetEQ_StoreWaitingTime(MCUInst_t *inst, int waiting_time) {
- assert(inst->next_waiting_time_index < kLenWaitingTimes);
- inst->waiting_times[inst->next_waiting_time_index] = waiting_time;
- inst->next_waiting_time_index++;
- if (inst->next_waiting_time_index >= kLenWaitingTimes) {
- inst->next_waiting_time_index = 0;
- }
- if (inst->len_waiting_times < kLenWaitingTimes) {
- inst->len_waiting_times++;
- }
-}
-
-/*
- * Reset all MCU-side statistics variables for the post-call statistics.
- */
-
-int WebRtcNetEQ_ResetMcuJitterStat(MCUInst_t *inst)
-{
- inst->BufferStat_inst.Automode_inst.countIAT500ms = 0;
- inst->BufferStat_inst.Automode_inst.countIAT1000ms = 0;
- inst->BufferStat_inst.Automode_inst.countIAT2000ms = 0;
- inst->BufferStat_inst.Automode_inst.longestIATms = 0;
-
- return 0;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.c
deleted file mode 100644
index 78da2c7c7db..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.c
+++ /dev/null
@@ -1,570 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This is the function to merge a new packet with expanded data after a packet loss.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-
-/****************************************************************************
- * WebRtcNetEQ_Merge(...)
- *
- * This function...
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to new decoded speech.
- * - len : Number of samples in pw16_decoded.
- *
- *
- * Output:
- * - inst : Updated user information
- * - outData : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Number of samples written to pw16_outData
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_expanded 210*fs/8000 0 209*fs/8000
- int16_t pw16_expandedLB 100 210*fs/8000 99+210*fs/8000
- int16_t pw16_decodedLB 40 100+210*fs/8000 139+210*fs/8000
- int32_t pw32_corr 2*60 140+210*fs/8000 260+210*fs/8000
- int16_t pw16_corrVec 68 210*fs/8000 67+210*fs/8000
-
- [gap in scratch vector]
-
- func WebRtcNetEQ_Expand 40+370*fs/8000 126*fs/8000 39+496*fs/8000
-
- Total: 40+496*fs/8000
- */
-
-#define SCRATCH_pw16_expanded 0
-#if (defined(NETEQ_48KHZ_WIDEBAND))
-#define SCRATCH_pw16_expandedLB 1260
-#define SCRATCH_pw16_decodedLB 1360
-#define SCRATCH_pw32_corr 1400
-#define SCRATCH_pw16_corrVec 1260
-#define SCRATCH_NETEQ_EXPAND 756
-#elif (defined(NETEQ_32KHZ_WIDEBAND))
-#define SCRATCH_pw16_expandedLB 840
-#define SCRATCH_pw16_decodedLB 940
-#define SCRATCH_pw32_corr 980
-#define SCRATCH_pw16_corrVec 840
-#define SCRATCH_NETEQ_EXPAND 504
-#elif (defined(NETEQ_WIDEBAND))
-#define SCRATCH_pw16_expandedLB 420
-#define SCRATCH_pw16_decodedLB 520
-#define SCRATCH_pw32_corr 560
-#define SCRATCH_pw16_corrVec 420
-#define SCRATCH_NETEQ_EXPAND 252
-#else /* NB */
-#define SCRATCH_pw16_expandedLB 210
-#define SCRATCH_pw16_decodedLB 310
-#define SCRATCH_pw32_corr 350
-#define SCRATCH_pw16_corrVec 210
-#define SCRATCH_NETEQ_EXPAND 126
-#endif
-
-int WebRtcNetEQ_Merge(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_decoded, int len, int16_t *pw16_outData,
- int16_t *pw16_len)
-{
-
- int16_t fs_mult;
- int16_t fs_shift;
- int32_t w32_En_new_frame, w32_En_old_frame;
- int16_t w16_expmax, w16_newmax;
- int16_t w16_tmp, w16_tmp2;
- int32_t w32_tmp;
-#ifdef SCRATCH
- int16_t *pw16_expanded = pw16_scratchPtr + SCRATCH_pw16_expanded;
- int16_t *pw16_expandedLB = pw16_scratchPtr + SCRATCH_pw16_expandedLB;
- int16_t *pw16_decodedLB = pw16_scratchPtr + SCRATCH_pw16_decodedLB;
- int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_pw32_corr);
- int16_t *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;
-#else
- int16_t pw16_expanded[(125+80+5)*FSMULT];
- int16_t pw16_expandedLB[100];
- int16_t pw16_decodedLB[40];
- int32_t pw32_corr[60];
- int16_t pw16_corrVec[4+60+4];
-#endif
- int16_t *pw16_corr = &pw16_corrVec[4];
- int16_t w16_stopPos = 0, w16_bestIndex, w16_interpLen;
- int16_t w16_bestVal; /* bestVal is dummy */
- int16_t w16_startfact, w16_inc;
- int16_t w16_expandedLen;
- int16_t w16_startPos;
- int16_t w16_expLen, w16_newLen = 0;
- int16_t *pw16_decodedOut;
- int16_t w16_muted;
-
- int w16_decodedLen = len;
-
-#ifdef NETEQ_STEREO
- MasterSlaveInfo *msInfo = inst->msInfo;
-#endif
-
- fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
- fs_shift = 30 - WebRtcSpl_NormW32(fs_mult); /* Note that this is not "exact" for 48kHz */
-
- /*************************************
- * Generate data to merge with
- *************************************/
- /*
- * Check how much data that is left since earlier
- * (at least there should be the overlap)...
- */
- w16_startPos = inst->endPosition - inst->curPosition;
- /* Get one extra expansion to merge and overlap with */
- inst->ExpandInst.w16_stopMuting = 1;
- inst->ExpandInst.w16_lagsDirection = 1; /* make sure we get the "optimal" lag */
- inst->ExpandInst.w16_lagsPosition = -1; /* out of the 3 possible ones */
- w16_expandedLen = 0; /* Does not fill any function currently */
-
- if (w16_startPos >= 210 * FSMULT)
- {
- /*
- * The number of samples available in the sync buffer is more than what fits in
- * pw16_expanded.Keep the first 210*FSMULT samples, but shift them towards the end of
- * the buffer. This is ok, since all of the buffer will be expand data anyway, so as
- * long as the beginning is left untouched, we're fine.
- */
-
- w16_tmp = w16_startPos - 210 * FSMULT; /* length difference */
-
- WEBRTC_SPL_MEMMOVE_W16(&inst->speechBuffer[inst->curPosition+w16_tmp] ,
- &inst->speechBuffer[inst->curPosition], 210*FSMULT);
-
- inst->curPosition += w16_tmp; /* move start position of sync buffer accordingly */
- w16_startPos = 210 * FSMULT; /* this is the truncated length */
- }
-
- WebRtcNetEQ_Expand(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_expanded, /* let Expand write to beginning of pw16_expanded to avoid overflow */
- &w16_newLen, 0);
-
- /*
- * Now shift the data in pw16_expanded to where it belongs.
- * Truncate all that ends up outside the vector.
- */
-
- WEBRTC_SPL_MEMMOVE_W16(&pw16_expanded[w16_startPos], pw16_expanded,
- WEBRTC_SPL_MIN(w16_newLen,
- WEBRTC_SPL_MAX(210*FSMULT - w16_startPos, 0) ) );
-
- inst->ExpandInst.w16_stopMuting = 0;
-
- /* Copy what is left since earlier into the expanded vector */
-
- WEBRTC_SPL_MEMCPY_W16(pw16_expanded, &inst->speechBuffer[inst->curPosition], w16_startPos);
-
- /*
- * Do "ugly" copy and paste from the expanded in order to generate more data
- * to correlate (but not interpolate) with.
- */
- w16_expandedLen = (120 + 80 + 2) * fs_mult;
- w16_expLen = w16_startPos + w16_newLen;
-
- if (w16_expLen < w16_expandedLen)
- {
- while ((w16_expLen + w16_newLen) < w16_expandedLen)
- {
- WEBRTC_SPL_MEMCPY_W16(&pw16_expanded[w16_expLen], &pw16_expanded[w16_startPos],
- w16_newLen);
- w16_expLen += w16_newLen;
- }
-
- /* Copy last part (fraction of a whole expansion) */
-
- WEBRTC_SPL_MEMCPY_W16(&pw16_expanded[w16_expLen], &pw16_expanded[w16_startPos],
- (w16_expandedLen-w16_expLen));
- }
- w16_expLen = w16_expandedLen;
-
- /* Adjust muting factor (main muting factor times expand muting factor) */
- inst->w16_muteFactor
- = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
- inst->ExpandInst.w16_expandMuteFactor, 14);
-
- /* Adjust muting factor if new vector is more or less of the BGN energy */
- len = WEBRTC_SPL_MIN(64*fs_mult, w16_decodedLen);
- w16_expmax = WebRtcSpl_MaxAbsValueW16(pw16_expanded, (int16_t) len);
- w16_newmax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
-
- /* Calculate energy of old data */
- w16_tmp = 6 + fs_shift - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_expmax, w16_expmax));
- w16_tmp = WEBRTC_SPL_MAX(w16_tmp,0);
- w32_En_old_frame = WebRtcNetEQ_DotW16W16(pw16_expanded, pw16_expanded, len, w16_tmp);
-
- /* Calculate energy of new data */
- w16_tmp2 = 6 + fs_shift - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_newmax, w16_newmax));
- w16_tmp2 = WEBRTC_SPL_MAX(w16_tmp2,0);
- w32_En_new_frame = WebRtcNetEQ_DotW16W16(pw16_decoded, pw16_decoded, len, w16_tmp2);
-
- /* Align to same Q-domain */
- if (w16_tmp2 > w16_tmp)
- {
- w32_En_old_frame = WEBRTC_SPL_RSHIFT_W32(w32_En_old_frame, (w16_tmp2-w16_tmp));
- }
- else
- {
- w32_En_new_frame = WEBRTC_SPL_RSHIFT_W32(w32_En_new_frame, (w16_tmp-w16_tmp2));
- }
-
- /* Calculate muting factor to use for new frame */
- if (w32_En_new_frame > w32_En_old_frame)
- {
- /* Normalize w32_En_new_frame to 14 bits */
- w16_tmp = WebRtcSpl_NormW32(w32_En_new_frame) - 17;
- w32_En_new_frame = WEBRTC_SPL_SHIFT_W32(w32_En_new_frame, w16_tmp);
-
- /*
- * Put w32_En_old_frame in a domain 14 higher, so that
- * w32_En_old_frame/w32_En_new_frame is in Q14
- */
- w16_tmp = w16_tmp + 14;
- w32_En_old_frame = WEBRTC_SPL_SHIFT_W32(w32_En_old_frame, w16_tmp);
- w16_tmp
- = WebRtcSpl_DivW32W16ResW16(w32_En_old_frame, (int16_t) w32_En_new_frame);
- /* Calculate sqrt(w32_En_old_frame/w32_En_new_frame) in Q14 */
- w16_muted = (int16_t) WebRtcSpl_SqrtFloor(
- WEBRTC_SPL_LSHIFT_W32((int32_t)w16_tmp,14));
- }
- else
- {
- w16_muted = 16384; /* Set = 1.0 when old frame has higher energy than new */
- }
-
- /* Set the raise the continued muting factor w16_muted if w16_muteFactor is lower */
- if (w16_muted > inst->w16_muteFactor)
- {
- inst->w16_muteFactor = WEBRTC_SPL_MIN(w16_muted, 16384);
- }
-
-#ifdef NETEQ_STEREO
-
- /* Sanity for msInfo */
- if (msInfo == NULL)
- {
- /* this should not happen here */
- return MASTER_SLAVE_ERROR;
- }
-
- /* do not downsample and calculate correlations for slave instance(s) */
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
-#endif
-
- /*********************************************
- * Downsample to 4kHz and find best overlap
- *********************************************/
-
- /* Downsample to 4 kHz */
- if (inst->fs == 8000)
- {
- WebRtcSpl_DownsampleFast(&pw16_expanded[2], (int16_t) (w16_expandedLen - 2),
- pw16_expandedLB, (int16_t) (100),
- (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl, (int16_t) 3,
- (int16_t) 2, (int16_t) 0);
- if (w16_decodedLen <= 80)
- {
- /* Not quite long enough, so we have to cheat a bit... */
- int16_t temp_len = w16_decodedLen - 2;
- w16_tmp = temp_len / 2;
- WebRtcSpl_DownsampleFast(&pw16_decoded[2], temp_len,
- pw16_decodedLB, w16_tmp,
- (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl,
- (int16_t) 3, (int16_t) 2, (int16_t) 0);
- WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40 - w16_tmp));
- }
- else
- {
- WebRtcSpl_DownsampleFast(&pw16_decoded[2],
- (int16_t) (w16_decodedLen - 2), pw16_decodedLB,
- (int16_t) (40), (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl,
- (int16_t) 3, (int16_t) 2, (int16_t) 0);
- }
-#ifdef NETEQ_WIDEBAND
- }
- else if (inst->fs==16000)
- {
- WebRtcSpl_DownsampleFast(
- &pw16_expanded[4], (int16_t)(w16_expandedLen-4),
- pw16_expandedLB, (int16_t)(100),
- (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl, (int16_t)5,
- (int16_t)4, (int16_t)0);
- if (w16_decodedLen<=160)
- {
- /* Not quite long enough, so we have to cheat a bit... */
- int16_t temp_len = w16_decodedLen - 4;
- w16_tmp = temp_len / 4;
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[4], temp_len,
- pw16_decodedLB, w16_tmp,
- (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl, (int16_t)5,
- (int16_t)4, (int16_t)0);
- WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
- }
- else
- {
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[4], (int16_t)(w16_decodedLen-4),
- pw16_decodedLB, (int16_t)(40),
- (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl, (int16_t)5,
- (int16_t)4, (int16_t)0);
- }
-#endif
-#ifdef NETEQ_32KHZ_WIDEBAND
- }
- else if (inst->fs==32000)
- {
- /*
- * TODO(hlundin) Why is the offset into pw16_expanded 6?
- */
- WebRtcSpl_DownsampleFast(
- &pw16_expanded[6], (int16_t)(w16_expandedLen-6),
- pw16_expandedLB, (int16_t)(100),
- (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl, (int16_t)7,
- (int16_t)8, (int16_t)0);
- if (w16_decodedLen<=320)
- {
- /* Not quite long enough, so we have to cheat a bit... */
- int16_t temp_len = w16_decodedLen - 6;
- w16_tmp = temp_len / 8;
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[6], temp_len,
- pw16_decodedLB, w16_tmp,
- (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl, (int16_t)7,
- (int16_t)8, (int16_t)0);
- WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
- }
- else
- {
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[6], (int16_t)(w16_decodedLen-6),
- pw16_decodedLB, (int16_t)(40),
- (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl, (int16_t)7,
- (int16_t)8, (int16_t)0);
- }
-#endif
-#ifdef NETEQ_48KHZ_WIDEBAND
- }
- else /* if (inst->fs==48000) */
- {
- /*
- * TODO(hlundin) Why is the offset into pw16_expanded 6?
- */
- WebRtcSpl_DownsampleFast(
- &pw16_expanded[6], (int16_t)(w16_expandedLen-6),
- pw16_expandedLB, (int16_t)(100),
- (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl, (int16_t)7,
- (int16_t)12, (int16_t)0);
- if (w16_decodedLen<=320)
- {
- /* Not quite long enough, so we have to cheat a bit... */
- /*
- * TODO(hlundin): Is this correct? Downsampling is a factor 12
- * but w16_tmp = temp_len / 8.
- * (Was w16_tmp = ((w16_decodedLen-6)>>3) before re-write.)
- */
- int16_t temp_len = w16_decodedLen - 6;
- w16_tmp = temp_len / 8;
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[6], temp_len,
- pw16_decodedLB, w16_tmp,
- (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl, (int16_t)7,
- (int16_t)12, (int16_t)0);
- WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
- }
- else
- {
- WebRtcSpl_DownsampleFast(
- &pw16_decoded[6], (int16_t)(w16_decodedLen-6),
- pw16_decodedLB, (int16_t)(40),
- (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl, (int16_t)7,
- (int16_t)12, (int16_t)0);
- }
-#endif
- }
-
- /* Calculate correlation without any normalization (40 samples) */
- w16_tmp = WebRtcSpl_DivW32W16ResW16((int32_t) inst->ExpandInst.w16_maxLag,
- (int16_t) (fs_mult * 2)) + 1;
- w16_stopPos = WEBRTC_SPL_MIN(60, w16_tmp);
- w32_tmp = WEBRTC_SPL_MUL_16_16(w16_expmax, w16_newmax);
- if (w32_tmp > 26843546)
- {
- w16_tmp = 3;
- }
- else
- {
- w16_tmp = 0;
- }
-
- WebRtcNetEQ_CrossCorr(pw32_corr, pw16_decodedLB, pw16_expandedLB, 40,
- (int16_t) w16_stopPos, w16_tmp, 1);
-
- /* Normalize correlation to 14 bits and put in a int16_t vector */
- WebRtcSpl_MemSetW16(pw16_corrVec, 0, (4 + 60 + 4));
- w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_stopPos);
- w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_stopPos, pw32_corr, w16_tmp);
-
- /* Calculate allowed starting point for peak finding.
- The peak location bestIndex must fulfill two criteria:
- (1) w16_bestIndex+w16_decodedLen < inst->timestampsPerCall+inst->ExpandInst.w16_overlap
- (2) w16_bestIndex+w16_decodedLen < w16_startPos */
- w16_tmp = WEBRTC_SPL_MAX(0, WEBRTC_SPL_MAX(w16_startPos,
- inst->timestampsPerCall+inst->ExpandInst.w16_overlap) - w16_decodedLen);
- /* Downscale starting index to 4kHz domain */
- w16_tmp2 = WebRtcSpl_DivW32W16ResW16((int32_t) w16_tmp,
- (int16_t) (fs_mult << 1));
-
-#ifdef NETEQ_STEREO
- } /* end if (msInfo->msMode != NETEQ_SLAVE) */
-
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
- /* This is master or mono instance; find peak */
- WebRtcNetEQ_PeakDetection(&pw16_corr[w16_tmp2], w16_stopPos, 1, fs_mult, &w16_bestIndex,
- &w16_bestVal);
- w16_bestIndex += w16_tmp; /* compensate for modified starting index */
- msInfo->bestIndex = w16_bestIndex;
- }
- else if (msInfo->msMode == NETEQ_SLAVE)
- {
- /* Get peak location from master instance */
- w16_bestIndex = msInfo->bestIndex;
- }
- else
- {
- /* Invalid mode */
- return MASTER_SLAVE_ERROR;
- }
-
-#else /* NETEQ_STEREO */
-
- /* Find peak */
- WebRtcNetEQ_PeakDetection(&pw16_corr[w16_tmp2], w16_stopPos, 1, fs_mult, &w16_bestIndex,
- &w16_bestVal);
- w16_bestIndex += w16_tmp; /* compensate for modified starting index */
-
-#endif /* NETEQ_STEREO */
-
- /*
- * Ensure that underrun does not occur for 10ms case => we have to get at least
- * 10ms + overlap . (This should never happen thanks to the above modification of
- * peak-finding starting point.)
- * */
- while ((w16_bestIndex + w16_decodedLen) < (inst->timestampsPerCall
- + inst->ExpandInst.w16_overlap) || w16_bestIndex + w16_decodedLen < w16_startPos)
- {
- w16_bestIndex += w16_newLen; /* Jump one lag ahead */
- }
- pw16_decodedOut = pw16_outData + w16_bestIndex;
-
- /* Mute the new decoded data if needed (and unmute it linearly) */
- w16_interpLen = WEBRTC_SPL_MIN(60*fs_mult,
- w16_expandedLen-w16_bestIndex); /* this is the overlapping part of pw16_expanded */
- w16_interpLen = WEBRTC_SPL_MIN(w16_interpLen, w16_decodedLen);
- w16_inc = WebRtcSpl_DivW32W16ResW16(4194,
- fs_mult); /* in Q20, 0.004 for NB and 0.002 for WB */
- if (inst->w16_muteFactor < 16384)
- {
- WebRtcNetEQ_UnmuteSignal(pw16_decoded, &inst->w16_muteFactor, pw16_decoded, w16_inc,
- (int16_t) w16_interpLen);
- WebRtcNetEQ_UnmuteSignal(&pw16_decoded[w16_interpLen], &inst->w16_muteFactor,
- &pw16_decodedOut[w16_interpLen], w16_inc,
- (int16_t) (w16_decodedLen - w16_interpLen));
- }
- else
- {
- /* No muting needed */
-
- WEBRTC_SPL_MEMMOVE_W16(&pw16_decodedOut[w16_interpLen], &pw16_decoded[w16_interpLen],
- (w16_decodedLen-w16_interpLen));
- }
-
- /* Do overlap and interpolate linearly */
- w16_inc = WebRtcSpl_DivW32W16ResW16(16384, (int16_t) (w16_interpLen + 1)); /* Q14 */
- w16_startfact = (16384 - w16_inc);
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_expanded, w16_bestIndex);
- WebRtcNetEQ_MixVoiceUnvoice(pw16_decodedOut, &pw16_expanded[w16_bestIndex], pw16_decoded,
- &w16_startfact, w16_inc, w16_interpLen);
-
- inst->w16_mode = MODE_MERGE;
- inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
-
- /* New added length (w16_startPos samples were borrowed) */
- *pw16_len = w16_bestIndex + w16_decodedLen - w16_startPos;
-
- /* Update VQmon parameter */
- inst->w16_concealedTS += (*pw16_len - w16_decodedLen);
- inst->w16_concealedTS = WEBRTC_SPL_MAX(0, inst->w16_concealedTS);
-
- /* Update in-call and post-call statistics */
- if (inst->ExpandInst.w16_expandMuteFactor == 0)
- {
- /* expansion generates noise only */
- inst->statInst.expandedNoiseSamples += (*pw16_len - w16_decodedLen);
- /* Short-term activity statistics. */
- inst->activity_stats.merge_expand_bgn_samples +=
- (*pw16_len - w16_decodedLen);
- }
- else
- {
- /* expansion generates more than only noise */
- inst->statInst.expandedVoiceSamples += (*pw16_len - w16_decodedLen);
- /* Short-term activity statistics. */
- inst->activity_stats.merge_expand_normal_samples +=
- (*pw16_len - w16_decodedLen);
- }
- inst->statInst.expandLength += (*pw16_len - w16_decodedLen);
-
-
- /* Copy back the first part of the data to the speechHistory */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->curPosition], pw16_outData, w16_startPos);
-
-
- /* Move data to within outData */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, &pw16_outData[w16_startPos], (*pw16_len));
-
- return 0;
-}
-
-#undef SCRATCH_pw16_expanded
-#undef SCRATCH_pw16_expandedLB
-#undef SCRATCH_pw16_decodedLB
-#undef SCRATCH_pw32_corr
-#undef SCRATCH_pw16_corrVec
-#undef SCRATCH_NETEQ_EXPAND
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc
index 463b2ca784c..d3d8077516b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/merge.h"
+#include "webrtc/modules/audio_coding/neteq/merge.h"
#include <assert.h>
#include <string.h> // memmove, memcpy, memset, size_t
@@ -16,10 +16,11 @@
#include <algorithm> // min, max
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
@@ -248,7 +249,7 @@ void Merge::Downsample(const int16_t* input, int input_length,
int num_coefficients;
int decimation_factor = fs_hz_ / 4000;
static const int kCompensateDelay = 0;
- int length_limit = fs_hz_ / 100;
+ int length_limit = fs_hz_ / 100; // 10 ms in samples.
if (fs_hz_ == 8000) {
filter_coefficients = DspHelper::kDownsample8kHzTbl;
num_coefficients = 3;
@@ -261,8 +262,6 @@ void Merge::Downsample(const int16_t* input, int input_length,
} else { // fs_hz_ == 48000
filter_coefficients = DspHelper::kDownsample48kHzTbl;
num_coefficients = 7;
- // TODO(hlundin) Why is |length_limit| not 480 (legacy)?
- length_limit = 320;
}
int signal_offset = num_coefficients - 1;
WebRtcSpl_DownsampleFast(&expanded_signal[signal_offset],
@@ -309,9 +308,11 @@ int16_t Merge::CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
stop_position_downsamp, correlation_shift, 1);
// Normalize correlation to 14 bits and copy to a 16-bit array.
- static const int kPadLength = 4;
- int16_t correlation16[kPadLength + kMaxCorrelationLength + kPadLength] = {0};
- int16_t* correlation_ptr = &correlation16[kPadLength];
+ const int pad_length = static_cast<int>(expand_->overlap_length() - 1);
+ const int correlation_buffer_size = 2 * pad_length + kMaxCorrelationLength;
+ scoped_ptr<int16_t[]> correlation16(new int16_t[correlation_buffer_size]);
+ memset(correlation16.get(), 0, correlation_buffer_size * sizeof(int16_t));
+ int16_t* correlation_ptr = &correlation16[pad_length];
int32_t max_correlation = WebRtcSpl_MaxAbsValueW32(correlation,
stop_position_downsamp);
int16_t norm_shift = std::max(0, 17 - WebRtcSpl_NormW32(max_correlation));
@@ -334,7 +335,7 @@ int16_t Merge::CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
// start index |start_index_downsamp| and the effective array length.
int modified_stop_pos =
std::min(stop_position_downsamp,
- kMaxCorrelationLength + kPadLength - start_index_downsamp);
+ kMaxCorrelationLength + pad_length - start_index_downsamp);
int best_correlation_index;
int16_t best_correlation;
static const int kNumCorrelationCandidates = 1;
@@ -357,4 +358,9 @@ int16_t Merge::CorrelateAndPeakSearch(int16_t expanded_max, int16_t input_max,
return best_correlation_index;
}
+int Merge::RequiredFutureSamples() {
+ return static_cast<int>(fs_hz_ / 100 * num_channels_); // 10 ms.
+}
+
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h
index f1f64e6c538..1bf0483dfe1 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge.h
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MERGE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MERGE_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MERGE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MERGE_H_
#include <assert.h>
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -35,8 +35,8 @@ class Merge {
public:
Merge(int fs_hz, size_t num_channels, Expand* expand, SyncBuffer* sync_buffer)
: fs_hz_(fs_hz),
- fs_mult_(fs_hz_ / 8000),
num_channels_(num_channels),
+ fs_mult_(fs_hz_ / 8000),
timestamps_per_call_(fs_hz_ / 100),
expand_(expand),
sync_buffer_(sync_buffer),
@@ -44,6 +44,8 @@ class Merge {
assert(num_channels_ > 0);
}
+ virtual ~Merge() {}
+
// The main method to produce the audio data. The decoded data is supplied in
// |input|, having |input_length| samples in total for all channels
// (interleaved). The result is written to |output|. The number of channels
@@ -51,9 +53,15 @@ class Merge {
// de-interleaving |input|. The values in |external_mute_factor_array| (Q14)
// will be used to scale the audio, and is updated in the process. The array
// must have |num_channels_| elements.
- int Process(int16_t* input, size_t input_length,
- int16_t* external_mute_factor_array,
- AudioMultiVector* output);
+ virtual int Process(int16_t* input, size_t input_length,
+ int16_t* external_mute_factor_array,
+ AudioMultiVector* output);
+
+ virtual int RequiredFutureSamples();
+
+ protected:
+ const int fs_hz_;
+ const size_t num_channels_;
private:
static const int kMaxSampleRate = 48000;
@@ -87,9 +95,7 @@ class Merge {
int start_position, int input_length,
int expand_period) const;
- const int fs_hz_;
const int fs_mult_; // fs_hz_ / 8000.
- const size_t num_channels_;
const int timestamps_per_call_;
Expand* expand_;
SyncBuffer* sync_buffer_;
@@ -101,4 +107,4 @@ class Merge {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MERGE_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MERGE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge_unittest.cc
index 1d7b1f1fed1..fb5f789ff1e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/merge_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/merge_unittest.cc
@@ -10,15 +10,15 @@
// Unit tests for Merge class.
-#include "webrtc/modules/audio_coding/neteq4/merge.h"
+#include "webrtc/modules/audio_coding/neteq/merge.h"
#include <vector>
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/min_distortion.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/min_distortion.c
deleted file mode 100644
index 47e2b442cd6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/min_distortion.c
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Calculate best overlap fit according to distortion measure.
- */
-
-#include "dsp_helpfunctions.h"
-
-#include "signal_processing_library.h"
-
-int16_t WebRtcNetEQ_MinDistortion(const int16_t *pw16_data,
- int16_t w16_minLag, int16_t w16_maxLag,
- int16_t len, int32_t *pw16_dist)
-{
- int i, j;
- const int16_t *pw16_data1;
- const int16_t *pw16_data2;
- int32_t w32_diff;
- int32_t w32_sumdiff;
- int16_t bestIndex = -1;
- int32_t minDist = WEBRTC_SPL_WORD32_MAX;
-
- for (i = w16_minLag; i <= w16_maxLag; i++)
- {
- w32_sumdiff = 0;
- pw16_data1 = pw16_data;
- pw16_data2 = pw16_data - i;
-
- for (j = 0; j < len; j++)
- {
- w32_diff = pw16_data1[j] - pw16_data2[j];
- w32_sumdiff += WEBRTC_SPL_ABS_W32(w32_diff);
- }
-
- /* Compare with previous minimum */
- if (w32_sumdiff < minDist)
- {
- minDist = w32_sumdiff;
- bestIndex = i;
- }
- }
-
- *pw16_dist = minDist;
-
- return bestIndex;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c
deleted file mode 100644
index 6c70d4916a3..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This function mixes a voiced signal with an unvoiced signal and
- * updates the weight on a sample by sample basis.
- */
-
-#include "dsp_helpfunctions.h"
-
-#include "signal_processing_library.h"
-
-void WebRtcNetEQ_MixVoiceUnvoice(int16_t *pw16_outData, int16_t *pw16_voicedVec,
- int16_t *pw16_unvoicedVec,
- int16_t *w16_current_vfraction,
- int16_t w16_vfraction_change, int16_t N)
-{
- int i;
- int16_t w16_tmp2;
- int16_t vfraction = *w16_current_vfraction;
-
- w16_tmp2 = 16384 - vfraction;
- for (i = 0; i < N; i++)
- {
- pw16_outData[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- WEBRTC_SPL_MUL_16_16(vfraction, pw16_voicedVec[i]) +
- WEBRTC_SPL_MUL_16_16(w16_tmp2, pw16_unvoicedVec[i]) + 8192,
- 14);
- vfraction -= w16_vfraction_change;
- w16_tmp2 += w16_vfraction_change;
- }
- *w16_current_vfraction = vfraction;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h
index a6d587447d6..edf3b54e9ea 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_DECODER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_DECODER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_DECODER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_DECODER_H_
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
#include "gmock/gmock.h"
@@ -35,4 +35,4 @@ class MockAudioDecoder : public AudioDecoder {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_DECODER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_DECODER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_vector.h
index 7a4747b0d31..a5a787c7aa4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_audio_vector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_audio_vector.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_VECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_VECTOR_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_VECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_VECTOR_H_
-#include "webrtc/modules/audio_coding/neteq4/audio_vector.h"
+#include "webrtc/modules/audio_coding/neteq/audio_vector.h"
#include "gmock/gmock.h"
@@ -48,4 +48,4 @@ class MockAudioVector : public AudioVector {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_AUDIO_VECTOR_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_AUDIO_VECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_buffer_level_filter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h
index 87265517290..d9210668dde 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_buffer_level_filter.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
#include "gmock/gmock.h"
@@ -34,4 +34,4 @@ class MockBufferLevelFilter : public BufferLevelFilter {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_BUFFER_LEVEL_FILTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
index c4ca25a5274..583fa54ba02 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DECODER_DATABASE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DECODER_DATABASE_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DECODER_DATABASE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DECODER_DATABASE_H_
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
#include "gmock/gmock.h"
@@ -61,4 +61,4 @@ class MockDecoderDatabase : public DecoderDatabase {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DECODER_DATABASE_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DECODER_DATABASE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_manager.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h
index 1edfb873716..c21a1c28c73 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_manager.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_MANAGER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_MANAGER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_MANAGER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_MANAGER_H_
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
#include "gmock/gmock.h"
@@ -60,4 +60,4 @@ class MockDelayManager : public DelayManager {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_MANAGER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_MANAGER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_peak_detector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h
index 211b2b91e12..26e09329e69 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_delay_peak_detector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
#include "gmock/gmock.h"
@@ -31,4 +31,4 @@ class MockDelayPeakDetector : public DelayPeakDetector {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DELAY_PEAK_DETECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h
index 5a89db46f68..0351d6b1e51 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_BUFFER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_BUFFER_H_
-#include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
#include "gmock/gmock.h"
@@ -35,4 +35,4 @@ class MockDtmfBuffer : public DtmfBuffer {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_BUFFER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h
index d34f7470ef2..3bed4d152b9 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
-#include "webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
#include "gmock/gmock.h"
@@ -32,4 +32,4 @@ class MockDtmfToneGenerator : public DtmfToneGenerator {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_DTMF_TONE_GENERATOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_external_decoder_pcm16b.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h
index efc0c715837..9522b537e3e 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_external_decoder_pcm16b.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h
@@ -8,14 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
#include "gmock/gmock.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -96,4 +96,4 @@ class MockExternalPcm16B : public ExternalPcm16B {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_EXTERNAL_DECODER_PCM16B_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
index 37fa90de737..2882248c192 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_packet_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PACKET_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PACKET_BUFFER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_BUFFER_H_
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
#include "gmock/gmock.h"
@@ -19,8 +19,8 @@ namespace webrtc {
class MockPacketBuffer : public PacketBuffer {
public:
- MockPacketBuffer(size_t max_number_of_packets, size_t max_payload_memory)
- : PacketBuffer(max_number_of_packets, max_payload_memory) {}
+ MockPacketBuffer(size_t max_number_of_packets)
+ : PacketBuffer(max_number_of_packets) {}
virtual ~MockPacketBuffer() { Die(); }
MOCK_METHOD0(Die, void());
MOCK_METHOD0(Flush,
@@ -55,4 +55,4 @@ class MockPacketBuffer : public PacketBuffer {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PACKET_BUFFER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PACKET_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_payload_splitter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_payload_splitter.h
index f3d8c9b048d..f1665423afd 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/mock/mock_payload_splitter.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/mock/mock_payload_splitter.h
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PAYLOAD_SPLITTER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PAYLOAD_SPLITTER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PAYLOAD_SPLITTER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PAYLOAD_SPLITTER_H_
-#include "webrtc/modules/audio_coding/neteq4/payload_splitter.h"
+#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
#include "gmock/gmock.h"
@@ -21,6 +21,8 @@ class MockPayloadSplitter : public PayloadSplitter {
public:
MOCK_METHOD1(SplitRed,
int(PacketList* packet_list));
+ MOCK_METHOD2(SplitFec,
+ int(PacketList* packet_list, DecoderDatabase* decoder_database));
MOCK_METHOD2(CheckRedPayloads,
int(PacketList* packet_list, const DecoderDatabase& decoder_database));
MOCK_METHOD2(SplitAudio,
@@ -34,4 +36,4 @@ class MockPayloadSplitter : public PayloadSplitter {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_MOCK_MOCK_PAYLOAD_SPLITTER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_MOCK_MOCK_PAYLOAD_SPLITTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/mute_signal.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/mute_signal.c
deleted file mode 100644
index 767a71dee1f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/mute_signal.c
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This function mutes a signal linearly on a sample by sample basis.
- */
-
-#include "dsp_helpfunctions.h"
-
-#include "signal_processing_library.h"
-
-void WebRtcNetEQ_MuteSignal(int16_t *pw16_inout, int16_t muteSlope,
- int16_t N)
-{
- int i;
- int32_t w32_tmp = 1048608; /* (16384<<6 + 32) */
-
- for (i = 0; i < N; i++)
- {
- pw16_inout[i]
- = (int16_t) ((WEBRTC_SPL_MUL_16_16((int16_t)(w32_tmp>>6), pw16_inout[i])
- + 8192) >> 14);
- w32_tmp -= muteSlope;
- }
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc
new file mode 100644
index 00000000000..7edacde7633
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.cc
@@ -0,0 +1,62 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+
+#include "webrtc/modules/audio_coding/neteq/accelerate.h"
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
+#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
+#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
+
+namespace webrtc {
+
+// Creates all classes needed and inject them into a new NetEqImpl object.
+// Return the new object.
+NetEq* NetEq::Create(const NetEq::Config& config) {
+ BufferLevelFilter* buffer_level_filter = new BufferLevelFilter;
+ DecoderDatabase* decoder_database = new DecoderDatabase;
+ DelayPeakDetector* delay_peak_detector = new DelayPeakDetector;
+ DelayManager* delay_manager =
+ new DelayManager(config.max_packets_in_buffer, delay_peak_detector);
+ delay_manager->SetMaximumDelay(config.max_delay_ms);
+ DtmfBuffer* dtmf_buffer = new DtmfBuffer(config.sample_rate_hz);
+ DtmfToneGenerator* dtmf_tone_generator = new DtmfToneGenerator;
+ PacketBuffer* packet_buffer = new PacketBuffer(config.max_packets_in_buffer);
+ PayloadSplitter* payload_splitter = new PayloadSplitter;
+ TimestampScaler* timestamp_scaler = new TimestampScaler(*decoder_database);
+ AccelerateFactory* accelerate_factory = new AccelerateFactory;
+ ExpandFactory* expand_factory = new ExpandFactory;
+ PreemptiveExpandFactory* preemptive_expand_factory =
+ new PreemptiveExpandFactory;
+ return new NetEqImpl(config.sample_rate_hz,
+ buffer_level_filter,
+ decoder_database,
+ delay_manager,
+ delay_peak_detector,
+ dtmf_buffer,
+ dtmf_tone_generator,
+ packet_buffer,
+ payload_splitter,
+ timestamp_scaler,
+ accelerate_factory,
+ expand_factory,
+ preemptive_expand_factory);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
index 27e5c37aab7..21ccee41e1c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq.gypi
@@ -7,242 +7,208 @@
# be found in the AUTHORS file in the root of the source tree.
{
+ 'variables': {
+ 'codecs': [
+ 'G711',
+ 'G722',
+ 'PCM16B',
+ 'iLBC',
+ 'iSAC',
+ 'iSACFix',
+ 'CNG',
+ ],
+ 'neteq_defines': [],
+ 'conditions': [
+ ['include_opus==1', {
+ 'codecs': ['webrtc_opus',],
+ 'neteq_defines': ['WEBRTC_CODEC_OPUS',],
+ }],
+ ],
+ 'neteq_dependencies': [
+ '<@(codecs)',
+ '<(DEPTH)/third_party/opus/opus.gyp:opus',
+ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ ],
+ },
'targets': [
{
- 'target_name': 'NetEq',
+ 'target_name': 'neteq',
'type': 'static_library',
'dependencies': [
- 'CNG',
- '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
+ '<@(neteq_dependencies)',
],
'defines': [
- 'NETEQ_VOICEENGINE_CODECS', # TODO: Should create a Chrome define which
- 'SCRATCH', # specifies a subset of codecs to support.
+ '<@(neteq_defines)',
],
'include_dirs': [
- 'interface',
- '<(webrtc_root)',
+ # Need Opus header files for the audio classifier.
+ '<(DEPTH)/third_party/opus/src/celt',
+ '<(DEPTH)/third_party/opus/src/src',
],
'direct_dependent_settings': {
'include_dirs': [
- 'interface',
- '<(webrtc_root)',
+ # Need Opus header files for the audio classifier.
+ '<(DEPTH)/third_party/opus/src/celt',
+ '<(DEPTH)/third_party/opus/src/src',
],
},
+ 'export_dependent_settings': [
+ '<(DEPTH)/third_party/opus/opus.gyp:opus',
+ ],
'sources': [
- 'interface/webrtc_neteq.h',
- 'interface/webrtc_neteq_help_macros.h',
- 'interface/webrtc_neteq_internal.h',
- 'accelerate.c',
- 'automode.c',
- 'automode.h',
- 'bgn_update.c',
- 'buffer_stats.h',
- 'bufstats_decision.c',
- 'cng_internal.c',
- 'codec_db.c',
- 'codec_db.h',
- 'codec_db_defines.h',
- 'correlator.c',
- 'delay_logging.h',
- 'dsp.c',
- 'dsp.h',
- 'dsp_helpfunctions.c',
- 'dsp_helpfunctions.h',
- 'dtmf_buffer.c',
+ 'interface/audio_decoder.h',
+ 'interface/neteq.h',
+ 'accelerate.cc',
+ 'accelerate.h',
+ 'audio_classifier.cc',
+ 'audio_classifier.h',
+ 'audio_decoder_impl.cc',
+ 'audio_decoder_impl.h',
+ 'audio_decoder.cc',
+ 'audio_multi_vector.cc',
+ 'audio_multi_vector.h',
+ 'audio_vector.cc',
+ 'audio_vector.h',
+ 'background_noise.cc',
+ 'background_noise.h',
+ 'buffer_level_filter.cc',
+ 'buffer_level_filter.h',
+ 'comfort_noise.cc',
+ 'comfort_noise.h',
+ 'decision_logic.cc',
+ 'decision_logic.h',
+ 'decision_logic_fax.cc',
+ 'decision_logic_fax.h',
+ 'decision_logic_normal.cc',
+ 'decision_logic_normal.h',
+ 'decoder_database.cc',
+ 'decoder_database.h',
+ 'defines.h',
+ 'delay_manager.cc',
+ 'delay_manager.h',
+ 'delay_peak_detector.cc',
+ 'delay_peak_detector.h',
+ 'dsp_helper.cc',
+ 'dsp_helper.h',
+ 'dtmf_buffer.cc',
'dtmf_buffer.h',
- 'dtmf_tonegen.c',
- 'dtmf_tonegen.h',
- 'expand.c',
- 'mcu.h',
- 'mcu_address_init.c',
- 'mcu_dsp_common.c',
- 'mcu_dsp_common.h',
- 'mcu_reset.c',
- 'merge.c',
- 'min_distortion.c',
- 'mix_voice_unvoice.c',
- 'mute_signal.c',
- 'neteq_defines.h',
- 'neteq_error_codes.h',
- 'neteq_statistics.h',
- 'normal.c',
- 'packet_buffer.c',
+ 'dtmf_tone_generator.cc',
+ 'dtmf_tone_generator.h',
+ 'expand.cc',
+ 'expand.h',
+ 'merge.cc',
+ 'merge.h',
+ 'neteq_impl.cc',
+ 'neteq_impl.h',
+ 'neteq.cc',
+ 'statistics_calculator.cc',
+ 'statistics_calculator.h',
+ 'normal.cc',
+ 'normal.h',
+ 'packet_buffer.cc',
'packet_buffer.h',
- 'peak_detection.c',
- 'preemptive_expand.c',
- 'random_vector.c',
- 'recin.c',
- 'recout.c',
- 'rtcp.c',
+ 'payload_splitter.cc',
+ 'payload_splitter.h',
+ 'post_decode_vad.cc',
+ 'post_decode_vad.h',
+ 'preemptive_expand.cc',
+ 'preemptive_expand.h',
+ 'random_vector.cc',
+ 'random_vector.h',
+ 'rtcp.cc',
'rtcp.h',
- 'rtp.c',
- 'rtp.h',
- 'set_fs.c',
- 'signal_mcu.c',
- 'split_and_insert.c',
- 'unmute_signal.c',
- 'webrtc_neteq.c',
+ 'sync_buffer.cc',
+ 'sync_buffer.h',
+ 'timestamp_scaler.cc',
+ 'timestamp_scaler.h',
+ 'time_stretch.cc',
+ 'time_stretch.h',
],
},
], # targets
'conditions': [
['include_tests==1', {
+ 'includes': ['neteq_tests.gypi',],
'targets': [
{
- 'target_name': 'neteq_unittests',
+ 'target_name': 'audio_decoder_unittests',
'type': '<(gtest_target_type)',
'dependencies': [
- 'NetEq',
- 'NetEqTestTools',
- 'neteq_unittest_tools',
+ '<@(codecs)',
'<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
'<(webrtc_root)/test/test.gyp:test_support_main',
],
- 'sources': [
- 'webrtc_neteq_unittest.cc',
+ 'defines': [
+ 'AUDIO_DECODER_UNITTEST',
+ 'WEBRTC_CODEC_G722',
+ 'WEBRTC_CODEC_ILBC',
+ 'WEBRTC_CODEC_ISACFX',
+ 'WEBRTC_CODEC_ISAC',
+ 'WEBRTC_CODEC_PCM16',
+ '<@(neteq_defines)',
],
- # Disable warnings to enable Win64 build, issue 1323.
- 'msvs_disabled_warnings': [
- 4267, # size_t to int truncation.
+ 'sources': [
+ 'audio_decoder_impl.cc',
+ 'audio_decoder_impl.h',
+ 'audio_decoder_unittest.cc',
+ 'audio_decoder.cc',
+ 'interface/audio_decoder.h',
],
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
}],
],
- }, # neteq_unittests
- {
- 'target_name': 'NetEqRTPplay',
- 'type': 'executable',
- 'dependencies': [
- 'NetEq', # NetEQ library defined above
- 'NetEqTestTools', # Test helpers
- 'G711',
- 'G722',
- 'PCM16B',
- 'iLBC',
- 'iSAC',
- 'CNG',
- ],
- 'defines': [
- # TODO: Make codec selection conditional on definitions in target NetEq
- 'CODEC_ILBC',
- 'CODEC_PCM16B',
- 'CODEC_G711',
- 'CODEC_G722',
- 'CODEC_ISAC',
- 'CODEC_PCM16B_WB',
- 'CODEC_ISAC_SWB',
- 'CODEC_ISAC_FB',
- 'CODEC_PCM16B_32KHZ',
- 'CODEC_CNGCODEC8',
- 'CODEC_CNGCODEC16',
- 'CODEC_CNGCODEC32',
- 'CODEC_ATEVENT_DECODE',
- 'CODEC_RED',
- ],
- 'include_dirs': [
- '.',
- 'test',
- ],
- 'sources': [
- 'test/NetEqRTPplay.cc',
- ],
- # Disable warnings to enable Win64 build, issue 1323.
- 'msvs_disabled_warnings': [
- 4267, # size_t to int truncation.
- ],
- },
-
- {
- 'target_name': 'neteq3_speed_test',
- 'type': 'executable',
- 'dependencies': [
- 'NetEq',
- 'PCM16B',
- 'neteq_unittest_tools',
- '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
- ],
- 'sources': [
- 'test/neteq_speed_test.cc',
- ],
- },
+ }, # audio_decoder_unittests
{
- 'target_name': 'NetEqTestTools',
- # Collection of useful functions used in other tests
+ 'target_name': 'neteq_unittest_tools',
'type': 'static_library',
- 'variables': {
- # Expects RTP packets without payloads when enabled.
- 'neteq_dummy_rtp%': 0,
- },
'dependencies': [
- 'G711',
- 'G722',
- 'PCM16B',
- 'iLBC',
- 'iSAC',
- 'CNG',
- '<(DEPTH)/testing/gtest.gyp:gtest',
+ 'rtp_rtcp',
],
'direct_dependent_settings': {
'include_dirs': [
- 'interface',
- 'test',
+ 'tools',
],
},
- 'defines': [
- # TODO: Make codec selection conditional on definitions in target NetEq
- 'CODEC_ILBC',
- 'CODEC_PCM16B',
- 'CODEC_G711',
- 'CODEC_G722',
- 'CODEC_ISAC',
- 'CODEC_PCM16B_WB',
- 'CODEC_ISAC_SWB',
- 'CODEC_ISAC_FB',
- 'CODEC_PCM16B_32KHZ',
- 'CODEC_CNGCODEC8',
- 'CODEC_CNGCODEC16',
- 'CODEC_CNGCODEC32',
- 'CODEC_ATEVENT_DECODE',
- 'CODEC_RED',
- ],
'include_dirs': [
- 'interface',
- 'test',
+ 'tools',
],
'sources': [
- 'test/NETEQTEST_CodecClass.cc',
- 'test/NETEQTEST_CodecClass.h',
- 'test/NETEQTEST_DummyRTPpacket.cc',
- 'test/NETEQTEST_DummyRTPpacket.h',
- 'test/NETEQTEST_NetEQClass.cc',
- 'test/NETEQTEST_NetEQClass.h',
- 'test/NETEQTEST_RTPpacket.cc',
- 'test/NETEQTEST_RTPpacket.h',
- ],
- # Disable warnings to enable Win64 build, issue 1323.
- 'msvs_disabled_warnings': [
- 4267, # size_t to int truncation.
- ],
- },
+ 'tools/audio_checksum.h',
+ 'tools/audio_loop.cc',
+ 'tools/audio_loop.h',
+ 'tools/audio_sink.h',
+ 'tools/input_audio_file.cc',
+ 'tools/input_audio_file.h',
+ 'tools/output_audio_file.h',
+ 'tools/packet.cc',
+ 'tools/packet.h',
+ 'tools/packet_source.h',
+ 'tools/rtp_file_source.cc',
+ 'tools/rtp_file_source.h',
+ 'tools/rtp_generator.cc',
+ 'tools/rtp_generator.h',
+ ],
+ }, # neteq_unittest_tools
], # targets
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'targets': [
{
- 'target_name': 'neteq_unittests_apk_target',
+ 'target_name': 'audio_decoder_unittests_apk_target',
'type': 'none',
'dependencies': [
- '<(apk_tests_path):neteq_unittests_apk',
+ '<(apk_tests_path):audio_decoder_unittests_apk',
],
},
],
@@ -250,17 +216,17 @@
['test_isolation_mode != "noop"', {
'targets': [
{
- 'target_name': 'neteq_unittests_run',
+ 'target_name': 'audio_decoder_unittests_run',
'type': 'none',
'dependencies': [
- 'neteq_unittests',
+ 'audio_decoder_unittests',
],
'includes': [
'../../../build/isolate.gypi',
- 'neteq_unittests.isolate',
+ 'audio_decoder_unittests.isolate',
],
'sources': [
- 'neteq_unittests.isolate',
+ 'audio_decoder_unittests.isolate',
],
},
],
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_defines.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_defines.h
deleted file mode 100644
index b3b3da5b70e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_defines.h
+++ /dev/null
@@ -1,374 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*****************************************************************************************
- *
- * Compilation flags in NetEQ:
- *
- *****************************************************************************************
- *
- ***** Platform flags ******
- *
- * SCRATCH Run NetEQ with "Scratch memory" to save some stack memory.
- * Definition can be used on all platforms
- *
- ***** Summary flags ******
- *
- * NETEQ_ALL_SPECIAL_CODECS Add support for special codecs (CN/RED/DTMF)
- *
- * NETEQ_ALL_NB_CODECS Add support for all NB codecs (except CN/RED/DTMF)
- *
- * NETEQ_ALL_WB_CODECS Add support for all WB codecs (except CN/RED/DTMF)
- *
- * NETEQ_VOICEENGINE_CODECS Support for all NB, WB and SWB32 codecs and CN, RED and DTMF
- *
- * NETEQ_ALL_CODECS Support for all NB, WB, SWB 32kHz and SWB 48kHz as well as
- * CN, RED and DTMF
- *
- ***** Sampling frequency ******
- * (Note: usually not needed when Summary flags are used)
- *
- * NETEQ_WIDEBAND Wideband enabled
- *
- * NETEQ_32KHZ_WIDEBAND Super wideband @ 32kHz enabled
- *
- * NETEQ_48KHZ_WIDEBAND Super wideband @ 48kHz enabled
- *
- ***** Special Codec ******
- * (Note: not needed if NETEQ_ALL_CODECS is used)
- *
- * NETEQ_RED_CODEC With this flag you enable NetEQ to understand redundancy in
- * the RTP. NetEQ will use the redundancy if it's the same
- * codec
- *
- * NETEQ_CNG_CODEC Enable DTX with the CN payload
- *
- * NETEQ_ATEVENT_DECODE Enable AVT event and play out the corresponding DTMF tone
- *
- ***** Speech Codecs *****
- * (Note: Not needed if Summary flags are used)
- *
- * NETEQ_G711_CODEC Enable G.711 u- and A-law
- *
- * NETEQ_PCM16B_CODEC Enable uncompressed 16-bit
- *
- * NETEQ_ILBC_CODEC Enable iLBC
- *
- * NETEQ_ISAC_CODEC Enable iSAC
- *
- * NETEQ_ISAC_SWB_CODEC Enable iSAC-SWB
- *
- * Note that the decoder of iSAC full-band operates at 32 kHz, that is the
- * decoded signal is at 32 kHz.
- * NETEQ_ISAC_FB_CODEC Enable iSAC-FB
- *
- * NETEQ_G722_CODEC Enable G.722
- *
- * NETEQ_G729_CODEC Enable G.729
- *
- * NETEQ_G729_1_CODEC Enable G.729.1
- *
- * NETEQ_G726_CODEC Enable G.726
- *
- * NETEQ_G722_1_CODEC Enable G722.1
- *
- * NETEQ_G722_1C_CODEC Enable G722.1 Annex C
- *
- * NETEQ_OPUS_CODEC Enable Opus
- *
- * NETEQ_SPEEX_CODEC Enable Speex (at 8 and 16 kHz sample rate)
- *
- * NETEQ_CELT_CODEC Enable Celt (at 32 kHz sample rate)
- *
- * NETEQ_GSMFR_CODEC Enable GSM-FR
- *
- * NETEQ_AMR_CODEC Enable AMR (narrowband)
- *
- * NETEQ_AMRWB_CODEC Enable AMR-WB
- *
- * NETEQ_CNG_CODEC Enable DTX with the CNG payload
- *
- * NETEQ_ATEVENT_DECODE Enable AVT event and play out the corresponding DTMF tone
- *
- ***** Test flags ******
- *
- * WEBRTC_NETEQ_40BITACC_TEST Run NetEQ with simulated 40-bit accumulator to run
- * bit-exact to a DSP implementation where the main (splib
- * and NetEQ) functions have been 40-bit optimized
- *
- *****************************************************************************************
- */
-
-#if !defined NETEQ_DEFINES_H
-#define NETEQ_DEFINES_H
-
-/* Data block structure for MCU to DSP communication:
- *
- *
- * First 3 16-bit words are pre-header that contains instructions and timestamp update
- * Fourth 16-bit word is length of data block 1
- * Rest is payload data
- *
- * 0 48 64 80
- * -------------...----------------------------------------------------------------------
- * | PreHeader ... | Length 1 | Payload data 1 ...... | Lenght 2| Data block 2.... | ...
- * -------------...----------------------------------------------------------------------
- *
- *
- * Preheader:
- * 4 MSB can be either of:
- */
-
-#define DSP_INSTR_NORMAL 0x1000
-/* Payload data will contain the encoded frames */
-
-#define DSP_INSTR_MERGE 0x2000
-/* Payload data block 1 will contain the encoded frame */
-/* Info block will contain the number of missing samples */
-
-#define DSP_INSTR_EXPAND 0x3000
-/* Payload data will be empty */
-
-#define DSP_INSTR_ACCELERATE 0x4000
-/* Payload data will contain the encoded frame */
-
-#define DSP_INSTR_DO_RFC3389CNG 0x5000
-/* Payload data will contain the SID frame if there is one*/
-
-#define DSP_INSTR_DTMF_GENERATE 0x6000
-/* Payload data will be one int16_t with the current DTMF value and one
- * int16_t with the current volume value
- */
-#define DSP_INSTR_NORMAL_ONE_DESC 0x7000
-/* No encoded frames */
-
-#define DSP_INSTR_DO_CODEC_INTERNAL_CNG 0x8000
-/* Codec has a built-in VAD/DTX scheme (use the above for "no transmission") */
-
-#define DSP_INSTR_PREEMPTIVE_EXPAND 0x9000
-/* Payload data will contain the encoded frames, if any */
-
-#define DSP_INSTR_DO_ALTERNATIVE_PLC 0xB000
-/* NetEQ switched off and packet missing... */
-
-#define DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS 0xC000
-/* NetEQ switched off and packet missing... */
-
-#define DSP_INSTR_DO_AUDIO_REPETITION 0xD000
-/* NetEQ switched off and packet missing... */
-
-#define DSP_INSTR_DO_AUDIO_REPETITION_INC_TS 0xE000
-/* NetEQ switched off and packet missing... */
-
-#define DSP_INSTR_FADE_TO_BGN 0xF000
-/* Exception handling: fade out to BGN (expand) */
-
-/*
- * Next 4 bits signal additional data that needs to be transmitted
- */
-
-#define DSP_CODEC_NO_CHANGE 0x0100
-#define DSP_CODEC_NEW_CODEC 0x0200
-#define DSP_CODEC_ADD_LATE_PKT 0x0300
-#define DSP_CODEC_RESET 0x0400
-#define DSP_DTMF_PAYLOAD 0x0010
-
-/*
- * The most significant bit of the payload-length
- * is used to flag whether the associated payload
- * is redundant payload. This currently useful only for
- * iSAC, where redundant payloads have to be treated
- * differently. Every time the length is read it must be
- * masked by DSP_CODEC_MASK_RED_FLAG to ignore the flag.
- * Use DSP_CODEC_RED_FLAG to set or retrieve the flag.
- */
-#define DSP_CODEC_MASK_RED_FLAG 0x7FFF
-#define DSP_CODEC_RED_FLAG 0x8000
-
-/*
- * The first block of payload data consist of decode function pointers,
- * and then the speech blocks.
- *
- */
-
-
-/*
- * The playout modes that NetEq produced (i.e. gives more info about if the
- * Accelerate was successful or not)
- */
-
-#define MODE_NORMAL 0x0000
-#define MODE_EXPAND 0x0001
-#define MODE_MERGE 0x0002
-#define MODE_SUCCESS_ACCELERATE 0x0003
-#define MODE_UNSUCCESS_ACCELERATE 0x0004
-#define MODE_RFC3389CNG 0x0005
-#define MODE_LOWEN_ACCELERATE 0x0006
-#define MODE_DTMF 0x0007
-#define MODE_ONE_DESCRIPTOR 0x0008
-#define MODE_CODEC_INTERNAL_CNG 0x0009
-#define MODE_SUCCESS_PREEMPTIVE 0x000A
-#define MODE_UNSUCCESS_PREEMPTIVE 0x000B
-#define MODE_LOWEN_PREEMPTIVE 0x000C
-#define MODE_FADE_TO_BGN 0x000D
-
-#define MODE_ERROR 0x0010
-
-#define MODE_AWAITING_CODEC_PTR 0x0100
-
-#define MODE_BGN_ONLY 0x0200
-
-#define MODE_MASTER_DTMF_SIGNAL 0x0400
-
-#define MODE_USING_STEREO 0x0800
-
-
-
-/***********************/
-/* Group codec defines */
-/***********************/
-
-#if (defined(NETEQ_ALL_SPECIAL_CODECS))
- #define NETEQ_CNG_CODEC
- #define NETEQ_ATEVENT_DECODE
- #define NETEQ_RED_CODEC
- #define NETEQ_VAD
- #define NETEQ_ARBITRARY_CODEC
-#endif
-
-#if (defined(NETEQ_ALL_NB_CODECS)) /* Except RED, DTMF and CNG */
- #define NETEQ_PCM16B_CODEC
- #define NETEQ_G711_CODEC
- #define NETEQ_ILBC_CODEC
- #define NETEQ_G729_CODEC
- #define NETEQ_G726_CODEC
- #define NETEQ_GSMFR_CODEC
- #define NETEQ_OPUS_CODEC
- #define NETEQ_AMR_CODEC
-#endif
-
-#if (defined(NETEQ_ALL_WB_CODECS)) /* Except RED, DTMF and CNG */
- #define NETEQ_ISAC_CODEC
- #define NETEQ_G722_CODEC
- #define NETEQ_G722_1_CODEC
- #define NETEQ_G729_1_CODEC
- #define NETEQ_OPUS_CODEC
- #define NETEQ_SPEEX_CODEC
- #define NETEQ_AMRWB_CODEC
- #define NETEQ_WIDEBAND
-#endif
-
-#if (defined(NETEQ_ALL_WB32_CODECS)) /* AAC, RED, DTMF and CNG */
- #define NETEQ_ISAC_SWB_CODEC
- #define NETEQ_32KHZ_WIDEBAND
- #define NETEQ_G722_1C_CODEC
- #define NETEQ_CELT_CODEC
- #define NETEQ_OPUS_CODEC
-#endif
-
-#if (defined(NETEQ_VOICEENGINE_CODECS))
- /* Special codecs */
- #define NETEQ_CNG_CODEC
- #define NETEQ_ATEVENT_DECODE
- #define NETEQ_RED_CODEC
- #define NETEQ_VAD
- #define NETEQ_ARBITRARY_CODEC
-
- /* Narrowband codecs */
- #define NETEQ_PCM16B_CODEC
- #define NETEQ_G711_CODEC
- #define NETEQ_ILBC_CODEC
- #define NETEQ_AMR_CODEC
- #define NETEQ_G729_CODEC
- #define NETEQ_GSMFR_CODEC
-
- /* Wideband codecs */
- #define NETEQ_WIDEBAND
- #define NETEQ_ISAC_CODEC
- #define NETEQ_G722_CODEC
- #define NETEQ_G722_1_CODEC
- #define NETEQ_G729_1_CODEC
- #define NETEQ_AMRWB_CODEC
- #define NETEQ_SPEEX_CODEC
-
- /* Super wideband 32kHz codecs */
- #define NETEQ_ISAC_SWB_CODEC
- #define NETEQ_32KHZ_WIDEBAND
- #define NETEQ_G722_1C_CODEC
- #define NETEQ_CELT_CODEC
-
- /* Fullband 48 kHz codecs */
- #define NETEQ_OPUS_CODEC
- #define NETEQ_ISAC_FB_CODEC
-#endif
-
-#if (defined(NETEQ_ALL_CODECS))
- /* Special codecs */
- #define NETEQ_CNG_CODEC
- #define NETEQ_ATEVENT_DECODE
- #define NETEQ_RED_CODEC
- #define NETEQ_VAD
- #define NETEQ_ARBITRARY_CODEC
-
- /* Narrowband codecs */
- #define NETEQ_PCM16B_CODEC
- #define NETEQ_G711_CODEC
- #define NETEQ_ILBC_CODEC
- #define NETEQ_G729_CODEC
- #define NETEQ_G726_CODEC
- #define NETEQ_GSMFR_CODEC
- #define NETEQ_AMR_CODEC
-
- /* Wideband codecs */
- #define NETEQ_WIDEBAND
- #define NETEQ_ISAC_CODEC
- #define NETEQ_G722_CODEC
- #define NETEQ_G722_1_CODEC
- #define NETEQ_G729_1_CODEC
- #define NETEQ_SPEEX_CODEC
- #define NETEQ_AMRWB_CODEC
-
- /* Super wideband 32kHz codecs */
- #define NETEQ_ISAC_SWB_CODEC
- #define NETEQ_32KHZ_WIDEBAND
- #define NETEQ_G722_1C_CODEC
- #define NETEQ_CELT_CODEC
-
- /* Super wideband 48kHz codecs */
- #define NETEQ_48KHZ_WIDEBAND
- #define NETEQ_OPUS_CODEC
- #define NETEQ_ISAC_FB_CODEC
-#endif
-
-/* Max output size from decoding one frame */
-#if defined(NETEQ_48KHZ_WIDEBAND)
- #define NETEQ_MAX_FRAME_SIZE 5760 /* 120 ms super wideband */
- #define NETEQ_MAX_OUTPUT_SIZE 6480 /* 120+15 ms super wideband (120 ms
- * decoded + 15 ms for merge overlap) */
-#elif defined(NETEQ_32KHZ_WIDEBAND)
- #define NETEQ_MAX_FRAME_SIZE 3840 /* 120 ms super wideband */
- #define NETEQ_MAX_OUTPUT_SIZE 4320 /* 120+15 ms super wideband (120 ms
- * decoded + 15 ms for merge overlap) */
-#elif defined(NETEQ_WIDEBAND)
- #define NETEQ_MAX_FRAME_SIZE 1920 /* 120 ms wideband */
- #define NETEQ_MAX_OUTPUT_SIZE 2160 /* 120+15 ms wideband (120 ms decoded +
- * 15 ms for merge overlap) */
-#else
- #define NETEQ_MAX_FRAME_SIZE 960 /* 120 ms narrowband */
- #define NETEQ_MAX_OUTPUT_SIZE 1080 /* 120+15 ms narrowband (120 ms decoded
- * + 15 ms for merge overlap) */
-#endif
-
-
-/* Enable stereo */
-#define NETEQ_STEREO
-
-#endif /* #if !defined NETEQ_DEFINES_H */
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_error_codes.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_error_codes.h
deleted file mode 100644
index ab639d9c3e6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_error_codes.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Definition of error codes.
- *
- * NOTE: When modifying the error codes,
- * also modify the function WebRtcNetEQ_GetErrorCode!
- */
-
-#if !defined NETEQ_ERROR_CODES_H
-#define NETEQ_ERROR_CODES_H
-
-/* Misc Error */
-#define NETEQ_OTHER_ERROR -1000
-
-/* Misc Recout Errors */
-#define FAULTY_INSTRUCTION -1001
-#define FAULTY_NETWORK_TYPE -1002
-#define FAULTY_DELAYVALUE -1003
-#define FAULTY_PLAYOUTMODE -1004
-#define CORRUPT_INSTANCE -1005
-#define ILLEGAL_MASTER_SLAVE_SWITCH -1006
-#define MASTER_SLAVE_ERROR -1007
-
-/* Misc Recout problems */
-#define UNKNOWN_BUFSTAT_DECISION -2001
-#define RECOUT_ERROR_DECODING -2002
-#define RECOUT_ERROR_SAMPLEUNDERRUN -2003
-#define RECOUT_ERROR_DECODED_TOO_MUCH -2004
-
-/* Misc RecIn problems */
-#define RECIN_CNG_ERROR -3001
-#define RECIN_UNKNOWNPAYLOAD -3002
-#define RECIN_BUFFERINSERT_ERROR -3003
-#define RECIN_SYNC_RTP_CHANGED_CODEC -3004
-#define RECIN_SYNC_RTP_NOT_ACCEPTABLE -3005
-
-/* PBUFFER/BUFSTAT ERRORS */
-#define PBUFFER_INIT_ERROR -4001
-#define PBUFFER_INSERT_ERROR1 -4002
-#define PBUFFER_INSERT_ERROR2 -4003
-#define PBUFFER_INSERT_ERROR3 -4004
-#define PBUFFER_INSERT_ERROR4 -4005
-#define PBUFFER_INSERT_ERROR5 -4006
-#define UNKNOWN_G723_HEADER -4007
-#define PBUFFER_NONEXISTING_PACKET -4008
-#define PBUFFER_NOT_INITIALIZED -4009
-#define AMBIGUOUS_ILBC_FRAME_SIZE -4010
-
-/* CODEC DATABASE ERRORS */
-#define CODEC_DB_FULL -5001
-#define CODEC_DB_NOT_EXIST1 -5002
-#define CODEC_DB_NOT_EXIST2 -5003
-#define CODEC_DB_NOT_EXIST3 -5004
-#define CODEC_DB_NOT_EXIST4 -5005
-#define CODEC_DB_UNKNOWN_CODEC -5006
-#define CODEC_DB_PAYLOAD_TAKEN -5007
-#define CODEC_DB_UNSUPPORTED_CODEC -5008
-#define CODEC_DB_UNSUPPORTED_FS -5009
-
-/* DTMF ERRORS */
-#define DTMF_DEC_PARAMETER_ERROR -6001
-#define DTMF_INSERT_ERROR -6002
-#define DTMF_GEN_UNKNOWN_SAMP_FREQ -6003
-#define DTMF_NOT_SUPPORTED -6004
-
-/* RTP/PACKET ERRORS */
-#define RED_SPLIT_ERROR1 -7001
-#define RED_SPLIT_ERROR2 -7002
-#define RTP_TOO_SHORT_PACKET -7003
-#define RTP_CORRUPT_PACKET -7004
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_external_decoder_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
index fec25e985e1..a40107651d0 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_external_decoder_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_external_decoder_unittest.cc
@@ -15,10 +15,10 @@
#include "gmock/gmock.h"
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_external_decoder_pcm16b.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_external_decoder_pcm16b.h"
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/gtest_disable.h"
@@ -45,13 +45,15 @@ class NetEqExternalDecoderTest : public ::testing::Test {
frame_size_ms_(10),
frame_size_samples_(frame_size_ms_ * samples_per_ms_),
output_size_samples_(frame_size_ms_ * samples_per_ms_),
- neteq_external_(NetEq::Create(sample_rate_hz_)),
- neteq_(NetEq::Create(sample_rate_hz_)),
external_decoder_(new MockExternalPcm16B(kDecoderPCM16Bswb32kHz)),
rtp_generator_(samples_per_ms_),
payload_size_bytes_(0),
last_send_time_(0),
last_arrival_time_(0) {
+ NetEq::Config config;
+ config.sample_rate_hz = sample_rate_hz_;
+ neteq_external_ = NetEq::Create(config);
+ neteq_ = NetEq::Create(config);
input_ = new int16_t[frame_size_samples_];
encoded_ = new uint8_t[2 * frame_size_samples_];
}
@@ -78,7 +80,6 @@ class NetEqExternalDecoderTest : public ::testing::Test {
ASSERT_EQ(NetEq::kOK,
neteq_external_->RegisterExternalDecoder(external_decoder_,
decoder,
- sample_rate_hz_,
kPayloadType));
ASSERT_EQ(NetEq::kOK,
neteq_->RegisterPayloadType(decoder, kPayloadType));
@@ -202,7 +203,7 @@ class NetEqExternalDecoderTest : public ::testing::Test {
scoped_ptr<test::InputAudioFile> input_file_;
};
-TEST_F(NetEqExternalDecoderTest, DISABLED_ON_ANDROID(RunTest)) {
+TEST_F(NetEqExternalDecoderTest, RunTest) {
RunTest(100); // Run 100 laps @ 10 ms each in the test loop.
}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
index fb27af2cff8..64a86603943 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/neteq_impl.h"
+#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"
#include <assert.h>
#include <memory.h> // memset
@@ -16,28 +16,28 @@
#include <algorithm>
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/accelerate.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/comfort_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
-#include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/merge.h"
-#include "webrtc/modules/audio_coding/neteq4/normal.h"
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/modules/audio_coding/neteq4/payload_splitter.h"
-#include "webrtc/modules/audio_coding/neteq4/post_decode_vad.h"
-#include "webrtc/modules/audio_coding/neteq4/preemptive_expand.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h"
+#include "webrtc/modules/audio_coding/neteq/accelerate.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/comfort_noise.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/defines.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/merge.h"
+#include "webrtc/modules/audio_coding/neteq/normal.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
+#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
+#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
@@ -58,8 +58,13 @@ NetEqImpl::NetEqImpl(int fs,
DtmfToneGenerator* dtmf_tone_generator,
PacketBuffer* packet_buffer,
PayloadSplitter* payload_splitter,
- TimestampScaler* timestamp_scaler)
- : buffer_level_filter_(buffer_level_filter),
+ TimestampScaler* timestamp_scaler,
+ AccelerateFactory* accelerate_factory,
+ ExpandFactory* expand_factory,
+ PreemptiveExpandFactory* preemptive_expand_factory,
+ bool create_components)
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ buffer_level_filter_(buffer_level_filter),
decoder_database_(decoder_database),
delay_manager_(delay_manager),
delay_peak_detector_(delay_peak_detector),
@@ -69,8 +74,10 @@ NetEqImpl::NetEqImpl(int fs,
payload_splitter_(payload_splitter),
timestamp_scaler_(timestamp_scaler),
vad_(new PostDecodeVad()),
+ expand_factory_(expand_factory),
+ accelerate_factory_(accelerate_factory),
+ preemptive_expand_factory_(preemptive_expand_factory),
last_mode_(kModeNormal),
- mute_factor_array_(NULL),
decoded_buffer_length_(kMaxFrameSize),
decoded_buffer_(new int16_t[decoded_buffer_length_]),
playout_timestamp_(0),
@@ -83,7 +90,6 @@ NetEqImpl::NetEqImpl(int fs,
first_packet_(true),
error_code_(0),
decoder_error_code_(0),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
decoded_packet_sequence_number_(-1),
decoded_packet_timestamp_(0) {
if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000) {
@@ -91,19 +97,15 @@ NetEqImpl::NetEqImpl(int fs,
"Changing to 8000 Hz.";
fs = 8000;
}
- LOG(LS_INFO) << "Create NetEqImpl object with fs = " << fs << ".";
+ LOG(LS_VERBOSE) << "Create NetEqImpl object with fs = " << fs << ".";
fs_hz_ = fs;
fs_mult_ = fs / 8000;
output_size_samples_ = kOutputSizeMs * 8 * fs_mult_;
decoder_frame_length_ = 3 * output_size_samples_;
WebRtcSpl_Init();
- decision_logic_.reset(DecisionLogic::Create(fs_hz_, output_size_samples_,
- kPlayoutOn,
- decoder_database_.get(),
- *packet_buffer_.get(),
- delay_manager_.get(),
- buffer_level_filter_.get()));
- SetSampleRateAndChannels(fs, 1); // Default is 1 channel.
+ if (create_components) {
+ SetSampleRateAndChannels(fs, 1); // Default is 1 channel.
+ }
}
NetEqImpl::~NetEqImpl() {
@@ -143,12 +145,12 @@ int NetEqImpl::InsertSyncPacket(const WebRtcRTPHeader& rtp_header,
int error = InsertPacketInternal(
rtp_header, kSyncPayload, sizeof(kSyncPayload), receive_timestamp, true);
- if (error != 0) {
- LOG_FERR1(LS_WARNING, InsertPacketInternal, error);
- error_code_ = error;
- return kFail;
- }
- return kOK;
+ if (error != 0) {
+ LOG_FERR1(LS_WARNING, InsertPacketInternal, error);
+ error_code_ = error;
+ return kFail;
+ }
+ return kOK;
}
int NetEqImpl::GetAudio(size_t max_length, int16_t* output_audio,
@@ -198,7 +200,6 @@ int NetEqImpl::RegisterPayloadType(enum NetEqDecoder codec,
int NetEqImpl::RegisterExternalDecoder(AudioDecoder* decoder,
enum NetEqDecoder codec,
- int sample_rate_hz,
uint8_t rtp_payload_type) {
CriticalSectionScoped lock(crit_sect_.get());
LOG_API2(static_cast<int>(rtp_payload_type), codec);
@@ -207,6 +208,7 @@ int NetEqImpl::RegisterExternalDecoder(AudioDecoder* decoder,
assert(false);
return kFail;
}
+ const int sample_rate_hz = AudioDecoder::CodecSampleRateHz(codec);
int ret = decoder_database_->InsertExternal(rtp_payload_type, codec,
sample_rate_hz, decoder);
if (ret != DecoderDatabase::kOK) {
@@ -278,12 +280,7 @@ void NetEqImpl::SetPlayoutMode(NetEqPlayoutMode mode) {
CriticalSectionScoped lock(crit_sect_.get());
if (!decision_logic_.get() || mode != decision_logic_->playout_mode()) {
// The reset() method calls delete for the old object.
- decision_logic_.reset(DecisionLogic::Create(fs_hz_, output_size_samples_,
- mode,
- decoder_database_.get(),
- *packet_buffer_.get(),
- delay_manager_.get(),
- buffer_level_filter_.get()));
+ CreateDecisionLogic(mode);
}
}
@@ -338,9 +335,15 @@ void NetEqImpl::DisableVad() {
vad_->Disable();
}
-uint32_t NetEqImpl::PlayoutTimestamp() {
+bool NetEqImpl::GetPlayoutTimestamp(uint32_t* timestamp) {
CriticalSectionScoped lock(crit_sect_.get());
- return timestamp_scaler_->ToExternal(playout_timestamp_);
+ if (first_packet_) {
+ // We don't have a valid RTP timestamp until we have decoded our first
+ // RTP packet.
+ return false;
+ }
+ *timestamp = timestamp_scaler_->ToExternal(playout_timestamp_);
+ return true;
}
int NetEqImpl::LastError() {
@@ -367,12 +370,9 @@ void NetEqImpl::FlushBuffers() {
}
void NetEqImpl::PacketBufferStatistics(int* current_num_packets,
- int* max_num_packets,
- int* current_memory_size_bytes,
- int* max_memory_size_bytes) const {
+ int* max_num_packets) const {
CriticalSectionScoped lock(crit_sect_.get());
- packet_buffer_->BufferStat(current_num_packets, max_num_packets,
- current_memory_size_bytes, max_memory_size_bytes);
+ packet_buffer_->BufferStat(current_num_packets, max_num_packets);
}
int NetEqImpl::DecodedRtpInfo(int* sequence_number, uint32_t* timestamp) const {
@@ -396,6 +396,11 @@ NetEqBackgroundNoiseMode NetEqImpl::BackgroundNoiseMode() const {
return background_noise_->mode();
}
+const SyncBuffer* NetEqImpl::sync_buffer_for_test() const {
+ CriticalSectionScoped lock(crit_sect_.get());
+ return sync_buffer_.get();
+}
+
// Methods below this line are private.
int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
@@ -552,10 +557,23 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
}
}
+ // Check for FEC in packets, and separate payloads into several packets.
+ int ret = payload_splitter_->SplitFec(&packet_list, decoder_database_.get());
+ if (ret != PayloadSplitter::kOK) {
+ LOG_FERR1(LS_WARNING, SplitFec, packet_list.size());
+ PacketBuffer::DeleteAllPackets(&packet_list);
+ switch (ret) {
+ case PayloadSplitter::kUnknownPayloadType:
+ return kUnknownRtpPayloadType;
+ default:
+ return kOtherError;
+ }
+ }
+
// Split payloads into smaller chunks. This also verifies that all payloads
// are of a known payload type. SplitAudio() method is protected against
// sync-packets.
- int ret = payload_splitter_->SplitAudio(&packet_list, *decoder_database_);
+ ret = payload_splitter_->SplitAudio(&packet_list, *decoder_database_);
if (ret != PayloadSplitter::kOK) {
LOG_FERR1(LS_WARNING, SplitAudio, packet_list.size());
PacketBuffer::DeleteAllPackets(&packet_list);
@@ -595,9 +613,6 @@ int NetEqImpl::InsertPacketInternal(const WebRtcRTPHeader& rtp_header,
new_codec_ = true;
update_sample_rate_and_channels = true;
LOG_F(LS_WARNING) << "Packet buffer flushed";
- } else if (ret == PacketBuffer::kOversizePacket) {
- LOG_F(LS_WARNING) << "Packet larger than packet buffer";
- return kOversizePacket;
} else if (ret != PacketBuffer::kOK) {
LOG_FERR1(LS_WARNING, InsertPacketList, packet_list.size());
PacketBuffer::DeleteAllPackets(&packet_list);
@@ -827,10 +842,10 @@ int NetEqImpl::GetAudioInternal(size_t max_length, int16_t* output,
sync_buffer_->set_dtmf_index(sync_buffer_->Size());
}
- if ((last_mode_ != kModeExpand) && (last_mode_ != kModeRfc3389Cng)) {
- // If last operation was neither expand, nor comfort noise, calculate the
- // |playout_timestamp_| from the |sync_buffer_|. However, do not update the
- // |playout_timestamp_| if it would be moved "backwards".
+ if (last_mode_ != kModeExpand) {
+ // If last operation was not expand, calculate the |playout_timestamp_| from
+ // the |sync_buffer_|. However, do not update the |playout_timestamp_| if it
+ // would be moved "backwards".
uint32_t temp_timestamp = sync_buffer_->end_timestamp() -
static_cast<uint32_t>(sync_buffer_->FutureLength());
if (static_cast<int32_t>(temp_timestamp - playout_timestamp_) > 0) {
@@ -864,15 +879,15 @@ int NetEqImpl::GetDecision(Operations* operation,
}
const RTPHeader* header = packet_buffer_->NextRtpHeader();
- if (decision_logic_->CngRfc3389On()) {
+ if (decision_logic_->CngRfc3389On() || last_mode_ == kModeRfc3389Cng) {
// Because of timestamp peculiarities, we have to "manually" disallow using
// a CNG packet with the same timestamp as the one that was last played.
// This can happen when using redundancy and will cause the timing to shift.
- while (header &&
- decoder_database_->IsComfortNoise(header->payloadType) &&
- end_timestamp >= header->timestamp) {
+ while (header && decoder_database_->IsComfortNoise(header->payloadType) &&
+ (end_timestamp >= header->timestamp ||
+ end_timestamp + decision_logic_->generated_noise_samples() >
+ header->timestamp)) {
// Don't use this packet, discard it.
- // TODO(hlundin): Write test for this case.
if (packet_buffer_->DiscardNextPacket() != PacketBuffer::kOK) {
assert(false); // Must be ok by design.
}
@@ -924,7 +939,7 @@ int NetEqImpl::GetDecision(Operations* operation,
return 0;
}
- decision_logic_->ExpandDecision(*operation == kExpand);
+ decision_logic_->ExpandDecision(*operation);
// Check conditions for reset.
if (new_codec_ || *operation == kUndefined) {
@@ -1043,6 +1058,11 @@ int NetEqImpl::GetDecision(Operations* operation,
// Move on with the preemptive expand decision.
break;
}
+ case kMerge: {
+ required_samples =
+ std::max(merge_->RequiredFutureSamples(), required_samples);
+ break;
+ }
default: {
// Do nothing.
}
@@ -1126,12 +1146,11 @@ int NetEqImpl::Decode(PacketList* packet_list, Operations* operation,
PacketBuffer::DeleteAllPackets(packet_list);
return kDecoderNotFound;
}
- // We should have correct sampling rate and number of channels. They
- // are set when packets are inserted.
+ // If sampling rate or number of channels has changed, we need to make
+ // a reset.
if (decoder_info->fs_hz != fs_hz_ ||
decoder->channels() != algorithm_buffer_->Channels()) {
- LOG_F(LS_ERROR) << "Sampling rate or number of channels mismatch.";
- assert(false);
+ // TODO(tlegrand): Add unittest to cover this event.
SetSampleRateAndChannels(decoder_info->fs_hz, decoder->channels());
}
sync_buffer_->set_end_timestamp(timestamp_);
@@ -1771,8 +1790,14 @@ int NetEqImpl::ExtractPackets(int required_samples, PacketList* packet_list) {
AudioDecoder* decoder = decoder_database_->GetDecoder(
packet->header.payloadType);
if (decoder) {
- packet_duration = packet->sync_packet ? decoder_frame_length_ :
- decoder->PacketDuration(packet->payload, packet->payload_length);
+ if (packet->sync_packet) {
+ packet_duration = decoder_frame_length_;
+ } else {
+ packet_duration = packet->primary ?
+ decoder->PacketDuration(packet->payload, packet->payload_length) :
+ decoder->PacketDurationRedundant(packet->payload,
+ packet->payload_length);
+ }
} else {
LOG_FERR1(LS_WARNING, GetDecoder, packet->header.payloadType) <<
"Could not find a decoder for a packet about to be extracted.";
@@ -1805,6 +1830,14 @@ int NetEqImpl::ExtractPackets(int required_samples, PacketList* packet_list) {
return extracted_samples;
}
+void NetEqImpl::UpdatePlcComponents(int fs_hz, size_t channels) {
+ // Delete objects and create new ones.
+ expand_.reset(expand_factory_->Create(background_noise_.get(),
+ sync_buffer_.get(), &random_vector_,
+ fs_hz, channels));
+ merge_.reset(new Merge(fs_hz, channels, expand_.get(), sync_buffer_.get()));
+}
+
void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) {
LOG_API2(fs_hz, channels);
// TODO(hlundin): Change to an enumerator and skip assert.
@@ -1852,19 +1885,20 @@ void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) {
// Reset random vector.
random_vector_.Reset();
- // Delete Expand object and create a new one.
- expand_.reset(new Expand(background_noise_.get(), sync_buffer_.get(),
- &random_vector_, fs_hz, channels));
+ UpdatePlcComponents(fs_hz, channels);
+
// Move index so that we create a small set of future samples (all 0).
sync_buffer_->set_next_index(sync_buffer_->next_index() -
- expand_->overlap_length());
+ expand_->overlap_length());
normal_.reset(new Normal(fs_hz, decoder_database_.get(), *background_noise_,
expand_.get()));
- merge_.reset(new Merge(fs_hz, channels, expand_.get(), sync_buffer_.get()));
- accelerate_.reset(new Accelerate(fs_hz, channels, *background_noise_));
- preemptive_expand_.reset(new PreemptiveExpand(fs_hz, channels,
- *background_noise_));
+ accelerate_.reset(
+ accelerate_factory_->Create(fs_hz, channels, *background_noise_));
+ preemptive_expand_.reset(preemptive_expand_factory_->Create(
+ fs_hz, channels,
+ *background_noise_,
+ static_cast<int>(expand_->overlap_length())));
// Delete ComfortNoise object and create a new one.
comfort_noise_.reset(new ComfortNoise(fs_hz, decoder_database_.get(),
@@ -1877,8 +1911,11 @@ void NetEqImpl::SetSampleRateAndChannels(int fs_hz, size_t channels) {
decoded_buffer_.reset(new int16_t[decoded_buffer_length_]);
}
- // Communicate new sample rate and output size to DecisionLogic object.
- assert(decision_logic_.get());
+ // Create DecisionLogic if it is not created yet, then communicate new sample
+ // rate and output size to DecisionLogic object.
+ if (!decision_logic_.get()) {
+ CreateDecisionLogic(kPlayoutOn);
+ }
decision_logic_->SetSampleRate(fs_hz_, output_size_samples_);
}
@@ -1899,4 +1936,12 @@ NetEqOutputType NetEqImpl::LastOutputType() {
}
}
+void NetEqImpl::CreateDecisionLogic(NetEqPlayoutMode mode) {
+ decision_logic_.reset(DecisionLogic::Create(fs_hz_, output_size_samples_,
+ mode,
+ decoder_database_.get(),
+ *packet_buffer_.get(),
+ delay_manager_.get(),
+ buffer_level_filter_.get()));
+}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
index 83dd58b5acb..e92babd8e35 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl.h
@@ -8,20 +8,21 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NETEQ_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NETEQ_IMPL_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_NETEQ_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_NETEQ_IMPL_H_
#include <vector>
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h" // Declare PacketList.
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/rtcp.h"
-#include "webrtc/modules/audio_coding/neteq4/statistics_calculator.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/defines.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h" // Declare PacketList.
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/rtcp.h"
+#include "webrtc/modules/audio_coding/neteq/statistics_calculator.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -48,7 +49,10 @@ class PreemptiveExpand;
class RandomVector;
class SyncBuffer;
class TimestampScaler;
+struct AccelerateFactory;
struct DtmfEvent;
+struct ExpandFactory;
+struct PreemptiveExpandFactory;
class NetEqImpl : public webrtc::NetEq {
public:
@@ -63,7 +67,11 @@ class NetEqImpl : public webrtc::NetEq {
DtmfToneGenerator* dtmf_tone_generator,
PacketBuffer* packet_buffer,
PayloadSplitter* payload_splitter,
- TimestampScaler* timestamp_scaler);
+ TimestampScaler* timestamp_scaler,
+ AccelerateFactory* accelerate_factory,
+ ExpandFactory* expand_factory,
+ PreemptiveExpandFactory* preemptive_expand_factory,
+ bool create_components = true);
virtual ~NetEqImpl();
@@ -107,11 +115,10 @@ class NetEqImpl : public webrtc::NetEq {
// Provides an externally created decoder object |decoder| to insert in the
// decoder database. The decoder implements a decoder of type |codec| and
- // associates it with |rtp_payload_type|. The decoder operates at the
- // frequency |sample_rate_hz|. Returns kOK on success, kFail on failure.
+ // associates it with |rtp_payload_type|. Returns kOK on success, kFail on
+ // failure.
virtual int RegisterExternalDecoder(AudioDecoder* decoder,
enum NetEqDecoder codec,
- int sample_rate_hz,
uint8_t rtp_payload_type);
// Removes |rtp_payload_type| from the codec database. Returns 0 on success,
@@ -159,8 +166,7 @@ class NetEqImpl : public webrtc::NetEq {
// Disables post-decode VAD.
virtual void DisableVad();
- // Returns the RTP timestamp for the last sample delivered by GetAudio().
- virtual uint32_t PlayoutTimestamp();
+ virtual bool GetPlayoutTimestamp(uint32_t* timestamp);
virtual int SetTargetNumberOfChannels() { return kNotImplemented; }
@@ -179,9 +185,7 @@ class NetEqImpl : public webrtc::NetEq {
virtual void FlushBuffers();
virtual void PacketBufferStatistics(int* current_num_packets,
- int* max_num_packets,
- int* current_memory_size_bytes,
- int* max_memory_size_bytes) const;
+ int* max_num_packets) const;
// Get sequence number and timestamp of the latest RTP.
// This method is to facilitate NACK.
@@ -193,7 +197,10 @@ class NetEqImpl : public webrtc::NetEq {
// Gets background noise mode.
virtual NetEqBackgroundNoiseMode BackgroundNoiseMode() const;
- private:
+ // This accessor method is only intended for testing purposes.
+ virtual const SyncBuffer* sync_buffer_for_test() const;
+
+ protected:
static const int kOutputSizeMs = 10;
static const int kMaxFrameSize = 2880; // 60 ms @ 48 kHz.
// TODO(hlundin): Provide a better value for kSyncBufferSize.
@@ -206,8 +213,8 @@ class NetEqImpl : public webrtc::NetEq {
const uint8_t* payload,
int length_bytes,
uint32_t receive_timestamp,
- bool is_sync_packet);
-
+ bool is_sync_packet)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Delivers 10 ms of audio data. The data is written to |output|, which can
// hold (at least) |max_length| elements. The number of channels that were
@@ -215,9 +222,10 @@ class NetEqImpl : public webrtc::NetEq {
// and each channel contains |samples_per_channel| elements. If more than one
// channel is written, the samples are interleaved.
// Returns 0 on success, otherwise an error code.
- int GetAudioInternal(size_t max_length, int16_t* output,
- int* samples_per_channel, int* num_channels);
-
+ int GetAudioInternal(size_t max_length,
+ int16_t* output,
+ int* samples_per_channel,
+ int* num_channels) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Provides a decision to the GetAudioInternal method. The decision what to
// do is written to |operation|. Packets to decode are written to
@@ -227,7 +235,7 @@ class NetEqImpl : public webrtc::NetEq {
int GetDecision(Operations* operation,
PacketList* packet_list,
DtmfEvent* dtmf_event,
- bool* play_dtmf);
+ bool* play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Decodes the speech packets in |packet_list|, and writes the results to
// |decoded_buffer|, which is allocated to hold |decoded_buffer_length|
@@ -235,113 +243,150 @@ class NetEqImpl : public webrtc::NetEq {
// The speech type -- speech or (codec-internal) comfort noise -- is written
// to |speech_type|. If |packet_list| contains any SID frames for RFC 3389
// comfort noise, those are not decoded.
- int Decode(PacketList* packet_list, Operations* operation,
- int* decoded_length, AudioDecoder::SpeechType* speech_type);
+ int Decode(PacketList* packet_list,
+ Operations* operation,
+ int* decoded_length,
+ AudioDecoder::SpeechType* speech_type)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Sub-method to Decode(). Performs the actual decoding.
- int DecodeLoop(PacketList* packet_list, Operations* operation,
- AudioDecoder* decoder, int* decoded_length,
- AudioDecoder::SpeechType* speech_type);
+ int DecodeLoop(PacketList* packet_list,
+ Operations* operation,
+ AudioDecoder* decoder,
+ int* decoded_length,
+ AudioDecoder::SpeechType* speech_type)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Sub-method which calls the Normal class to perform the normal operation.
- void DoNormal(const int16_t* decoded_buffer, size_t decoded_length,
- AudioDecoder::SpeechType speech_type, bool play_dtmf);
+ void DoNormal(const int16_t* decoded_buffer,
+ size_t decoded_length,
+ AudioDecoder::SpeechType speech_type,
+ bool play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Sub-method which calls the Merge class to perform the merge operation.
- void DoMerge(int16_t* decoded_buffer, size_t decoded_length,
- AudioDecoder::SpeechType speech_type, bool play_dtmf);
+ void DoMerge(int16_t* decoded_buffer,
+ size_t decoded_length,
+ AudioDecoder::SpeechType speech_type,
+ bool play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Sub-method which calls the Expand class to perform the expand operation.
- int DoExpand(bool play_dtmf);
+ int DoExpand(bool play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Sub-method which calls the Accelerate class to perform the accelerate
// operation.
- int DoAccelerate(int16_t* decoded_buffer, size_t decoded_length,
- AudioDecoder::SpeechType speech_type, bool play_dtmf);
+ int DoAccelerate(int16_t* decoded_buffer,
+ size_t decoded_length,
+ AudioDecoder::SpeechType speech_type,
+ bool play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Sub-method which calls the PreemptiveExpand class to perform the
// preemtive expand operation.
- int DoPreemptiveExpand(int16_t* decoded_buffer, size_t decoded_length,
- AudioDecoder::SpeechType speech_type, bool play_dtmf);
+ int DoPreemptiveExpand(int16_t* decoded_buffer,
+ size_t decoded_length,
+ AudioDecoder::SpeechType speech_type,
+ bool play_dtmf) EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Sub-method which calls the ComfortNoise class to generate RFC 3389 comfort
// noise. |packet_list| can either contain one SID frame to update the
// noise parameters, or no payload at all, in which case the previously
// received parameters are used.
- int DoRfc3389Cng(PacketList* packet_list, bool play_dtmf);
+ int DoRfc3389Cng(PacketList* packet_list, bool play_dtmf)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Calls the audio decoder to generate codec-internal comfort noise when
// no packet was received.
- void DoCodecInternalCng();
+ void DoCodecInternalCng() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Calls the DtmfToneGenerator class to generate DTMF tones.
- int DoDtmf(const DtmfEvent& dtmf_event, bool* play_dtmf);
+ int DoDtmf(const DtmfEvent& dtmf_event, bool* play_dtmf)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Produces packet-loss concealment using alternative methods. If the codec
// has an internal PLC, it is called to generate samples. Otherwise, the
// method performs zero-stuffing.
- void DoAlternativePlc(bool increase_timestamp);
+ void DoAlternativePlc(bool increase_timestamp)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Overdub DTMF on top of |output|.
- int DtmfOverdub(const DtmfEvent& dtmf_event, size_t num_channels,
- int16_t* output) const;
+ int DtmfOverdub(const DtmfEvent& dtmf_event,
+ size_t num_channels,
+ int16_t* output) const EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Extracts packets from |packet_buffer_| to produce at least
// |required_samples| samples. The packets are inserted into |packet_list|.
// Returns the number of samples that the packets in the list will produce, or
// -1 in case of an error.
- int ExtractPackets(int required_samples, PacketList* packet_list);
+ int ExtractPackets(int required_samples, PacketList* packet_list)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Resets various variables and objects to new values based on the sample rate
// |fs_hz| and |channels| number audio channels.
- void SetSampleRateAndChannels(int fs_hz, size_t channels);
+ void SetSampleRateAndChannels(int fs_hz, size_t channels)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
// Returns the output type for the audio produced by the latest call to
// GetAudio().
- NetEqOutputType LastOutputType();
-
- scoped_ptr<BackgroundNoise> background_noise_;
- scoped_ptr<BufferLevelFilter> buffer_level_filter_;
- scoped_ptr<DecoderDatabase> decoder_database_;
- scoped_ptr<DelayManager> delay_manager_;
- scoped_ptr<DelayPeakDetector> delay_peak_detector_;
- scoped_ptr<DtmfBuffer> dtmf_buffer_;
- scoped_ptr<DtmfToneGenerator> dtmf_tone_generator_;
- scoped_ptr<PacketBuffer> packet_buffer_;
- scoped_ptr<PayloadSplitter> payload_splitter_;
- scoped_ptr<TimestampScaler> timestamp_scaler_;
- scoped_ptr<DecisionLogic> decision_logic_;
- scoped_ptr<PostDecodeVad> vad_;
- scoped_ptr<AudioMultiVector> algorithm_buffer_;
- scoped_ptr<SyncBuffer> sync_buffer_;
- scoped_ptr<Expand> expand_;
- scoped_ptr<Normal> normal_;
- scoped_ptr<Merge> merge_;
- scoped_ptr<Accelerate> accelerate_;
- scoped_ptr<PreemptiveExpand> preemptive_expand_;
- RandomVector random_vector_;
- scoped_ptr<ComfortNoise> comfort_noise_;
- Rtcp rtcp_;
- StatisticsCalculator stats_;
- int fs_hz_;
- int fs_mult_;
- int output_size_samples_;
- int decoder_frame_length_;
- Modes last_mode_;
- scoped_array<int16_t> mute_factor_array_;
- size_t decoded_buffer_length_;
- scoped_array<int16_t> decoded_buffer_;
- uint32_t playout_timestamp_;
- bool new_codec_;
- uint32_t timestamp_;
- bool reset_decoder_;
- uint8_t current_rtp_payload_type_;
- uint8_t current_cng_rtp_payload_type_;
- uint32_t ssrc_;
- bool first_packet_;
- int error_code_; // Store last error code.
- int decoder_error_code_;
- scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ NetEqOutputType LastOutputType() EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Updates Expand and Merge.
+ virtual void UpdatePlcComponents(int fs_hz, size_t channels)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ // Creates DecisionLogic object for the given mode.
+ virtual void CreateDecisionLogic(NetEqPlayoutMode mode)
+ EXCLUSIVE_LOCKS_REQUIRED(crit_sect_);
+
+ const scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ const scoped_ptr<BufferLevelFilter> buffer_level_filter_
+ GUARDED_BY(crit_sect_);
+ const scoped_ptr<DecoderDatabase> decoder_database_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<DelayManager> delay_manager_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<DelayPeakDetector> delay_peak_detector_
+ GUARDED_BY(crit_sect_);
+ const scoped_ptr<DtmfBuffer> dtmf_buffer_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<DtmfToneGenerator> dtmf_tone_generator_
+ GUARDED_BY(crit_sect_);
+ const scoped_ptr<PacketBuffer> packet_buffer_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<PayloadSplitter> payload_splitter_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<TimestampScaler> timestamp_scaler_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<PostDecodeVad> vad_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<ExpandFactory> expand_factory_ GUARDED_BY(crit_sect_);
+ const scoped_ptr<AccelerateFactory> accelerate_factory_
+ GUARDED_BY(crit_sect_);
+ const scoped_ptr<PreemptiveExpandFactory> preemptive_expand_factory_
+ GUARDED_BY(crit_sect_);
+
+ scoped_ptr<BackgroundNoise> background_noise_ GUARDED_BY(crit_sect_);
+ scoped_ptr<DecisionLogic> decision_logic_ GUARDED_BY(crit_sect_);
+ scoped_ptr<AudioMultiVector> algorithm_buffer_ GUARDED_BY(crit_sect_);
+ scoped_ptr<SyncBuffer> sync_buffer_ GUARDED_BY(crit_sect_);
+ scoped_ptr<Expand> expand_ GUARDED_BY(crit_sect_);
+ scoped_ptr<Normal> normal_ GUARDED_BY(crit_sect_);
+ scoped_ptr<Merge> merge_ GUARDED_BY(crit_sect_);
+ scoped_ptr<Accelerate> accelerate_ GUARDED_BY(crit_sect_);
+ scoped_ptr<PreemptiveExpand> preemptive_expand_ GUARDED_BY(crit_sect_);
+ RandomVector random_vector_ GUARDED_BY(crit_sect_);
+ scoped_ptr<ComfortNoise> comfort_noise_ GUARDED_BY(crit_sect_);
+ Rtcp rtcp_ GUARDED_BY(crit_sect_);
+ StatisticsCalculator stats_ GUARDED_BY(crit_sect_);
+ int fs_hz_ GUARDED_BY(crit_sect_);
+ int fs_mult_ GUARDED_BY(crit_sect_);
+ int output_size_samples_ GUARDED_BY(crit_sect_);
+ int decoder_frame_length_ GUARDED_BY(crit_sect_);
+ Modes last_mode_ GUARDED_BY(crit_sect_);
+ scoped_ptr<int16_t[]> mute_factor_array_ GUARDED_BY(crit_sect_);
+ size_t decoded_buffer_length_ GUARDED_BY(crit_sect_);
+ scoped_ptr<int16_t[]> decoded_buffer_ GUARDED_BY(crit_sect_);
+ uint32_t playout_timestamp_ GUARDED_BY(crit_sect_);
+ bool new_codec_ GUARDED_BY(crit_sect_);
+ uint32_t timestamp_ GUARDED_BY(crit_sect_);
+ bool reset_decoder_ GUARDED_BY(crit_sect_);
+ uint8_t current_rtp_payload_type_ GUARDED_BY(crit_sect_);
+ uint8_t current_cng_rtp_payload_type_ GUARDED_BY(crit_sect_);
+ uint32_t ssrc_ GUARDED_BY(crit_sect_);
+ bool first_packet_ GUARDED_BY(crit_sect_);
+ int error_code_ GUARDED_BY(crit_sect_); // Store last error code.
+ int decoder_error_code_ GUARDED_BY(crit_sect_);
// These values are used by NACK module to estimate time-to-play of
// a missing packet. Occasionally, NetEq might decide to decode more
@@ -350,11 +395,12 @@ class NetEqImpl : public webrtc::NetEq {
// such cases, these values do not exactly represent the sequence number
// or timestamp associated with a 10ms audio pulled from NetEq. NACK
// module is designed to compensate for this.
- int decoded_packet_sequence_number_;
- uint32_t decoded_packet_timestamp_;
+ int decoded_packet_sequence_number_ GUARDED_BY(crit_sect_);
+ uint32_t decoded_packet_timestamp_ GUARDED_BY(crit_sect_);
+ private:
DISALLOW_COPY_AND_ASSIGN(NetEqImpl);
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NETEQ_IMPL_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_NETEQ_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
new file mode 100644
index 00000000000..2e66487fae5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_impl_unittest.cc
@@ -0,0 +1,498 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/neteq_impl.h"
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/accelerate.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_buffer_level_filter.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_delay_peak_detector.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_dtmf_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_dtmf_tone_generator.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_payload_splitter.h"
+#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
+
+using ::testing::Return;
+using ::testing::ReturnNull;
+using ::testing::_;
+using ::testing::SetArgPointee;
+using ::testing::InSequence;
+using ::testing::Invoke;
+using ::testing::WithArg;
+
+namespace webrtc {
+
+// This function is called when inserting a packet list into the mock packet
+// buffer. The purpose is to delete all inserted packets properly, to avoid
+// memory leaks in the test.
+int DeletePacketsAndReturnOk(PacketList* packet_list) {
+ PacketBuffer::DeleteAllPackets(packet_list);
+ return PacketBuffer::kOK;
+}
+
+class NetEqImplTest : public ::testing::Test {
+ protected:
+ NetEqImplTest()
+ : neteq_(NULL),
+ config_(),
+ mock_buffer_level_filter_(NULL),
+ buffer_level_filter_(NULL),
+ use_mock_buffer_level_filter_(true),
+ mock_decoder_database_(NULL),
+ decoder_database_(NULL),
+ use_mock_decoder_database_(true),
+ mock_delay_peak_detector_(NULL),
+ delay_peak_detector_(NULL),
+ use_mock_delay_peak_detector_(true),
+ mock_delay_manager_(NULL),
+ delay_manager_(NULL),
+ use_mock_delay_manager_(true),
+ mock_dtmf_buffer_(NULL),
+ dtmf_buffer_(NULL),
+ use_mock_dtmf_buffer_(true),
+ mock_dtmf_tone_generator_(NULL),
+ dtmf_tone_generator_(NULL),
+ use_mock_dtmf_tone_generator_(true),
+ mock_packet_buffer_(NULL),
+ packet_buffer_(NULL),
+ use_mock_packet_buffer_(true),
+ mock_payload_splitter_(NULL),
+ payload_splitter_(NULL),
+ use_mock_payload_splitter_(true),
+ timestamp_scaler_(NULL) {
+ config_.sample_rate_hz = 8000;
+ }
+
+ void CreateInstance() {
+ if (use_mock_buffer_level_filter_) {
+ mock_buffer_level_filter_ = new MockBufferLevelFilter;
+ buffer_level_filter_ = mock_buffer_level_filter_;
+ } else {
+ buffer_level_filter_ = new BufferLevelFilter;
+ }
+ if (use_mock_decoder_database_) {
+ mock_decoder_database_ = new MockDecoderDatabase;
+ EXPECT_CALL(*mock_decoder_database_, GetActiveCngDecoder())
+ .WillOnce(ReturnNull());
+ decoder_database_ = mock_decoder_database_;
+ } else {
+ decoder_database_ = new DecoderDatabase;
+ }
+ if (use_mock_delay_peak_detector_) {
+ mock_delay_peak_detector_ = new MockDelayPeakDetector;
+ EXPECT_CALL(*mock_delay_peak_detector_, Reset()).Times(1);
+ delay_peak_detector_ = mock_delay_peak_detector_;
+ } else {
+ delay_peak_detector_ = new DelayPeakDetector;
+ }
+ if (use_mock_delay_manager_) {
+ mock_delay_manager_ = new MockDelayManager(config_.max_packets_in_buffer,
+ delay_peak_detector_);
+ EXPECT_CALL(*mock_delay_manager_, set_streaming_mode(false)).Times(1);
+ delay_manager_ = mock_delay_manager_;
+ } else {
+ delay_manager_ =
+ new DelayManager(config_.max_packets_in_buffer, delay_peak_detector_);
+ }
+ if (use_mock_dtmf_buffer_) {
+ mock_dtmf_buffer_ = new MockDtmfBuffer(config_.sample_rate_hz);
+ dtmf_buffer_ = mock_dtmf_buffer_;
+ } else {
+ dtmf_buffer_ = new DtmfBuffer(config_.sample_rate_hz);
+ }
+ if (use_mock_dtmf_tone_generator_) {
+ mock_dtmf_tone_generator_ = new MockDtmfToneGenerator;
+ dtmf_tone_generator_ = mock_dtmf_tone_generator_;
+ } else {
+ dtmf_tone_generator_ = new DtmfToneGenerator;
+ }
+ if (use_mock_packet_buffer_) {
+ mock_packet_buffer_ = new MockPacketBuffer(config_.max_packets_in_buffer);
+ packet_buffer_ = mock_packet_buffer_;
+ } else {
+ packet_buffer_ = new PacketBuffer(config_.max_packets_in_buffer);
+ }
+ if (use_mock_payload_splitter_) {
+ mock_payload_splitter_ = new MockPayloadSplitter;
+ payload_splitter_ = mock_payload_splitter_;
+ } else {
+ payload_splitter_ = new PayloadSplitter;
+ }
+ timestamp_scaler_ = new TimestampScaler(*decoder_database_);
+ AccelerateFactory* accelerate_factory = new AccelerateFactory;
+ ExpandFactory* expand_factory = new ExpandFactory;
+ PreemptiveExpandFactory* preemptive_expand_factory =
+ new PreemptiveExpandFactory;
+
+ neteq_ = new NetEqImpl(config_.sample_rate_hz,
+ buffer_level_filter_,
+ decoder_database_,
+ delay_manager_,
+ delay_peak_detector_,
+ dtmf_buffer_,
+ dtmf_tone_generator_,
+ packet_buffer_,
+ payload_splitter_,
+ timestamp_scaler_,
+ accelerate_factory,
+ expand_factory,
+ preemptive_expand_factory);
+ ASSERT_TRUE(neteq_ != NULL);
+ }
+
+ void UseNoMocks() {
+ ASSERT_TRUE(neteq_ == NULL) << "Must call UseNoMocks before CreateInstance";
+ use_mock_buffer_level_filter_ = false;
+ use_mock_decoder_database_ = false;
+ use_mock_delay_peak_detector_ = false;
+ use_mock_delay_manager_ = false;
+ use_mock_dtmf_buffer_ = false;
+ use_mock_dtmf_tone_generator_ = false;
+ use_mock_packet_buffer_ = false;
+ use_mock_payload_splitter_ = false;
+ }
+
+ virtual ~NetEqImplTest() {
+ if (use_mock_buffer_level_filter_) {
+ EXPECT_CALL(*mock_buffer_level_filter_, Die()).Times(1);
+ }
+ if (use_mock_decoder_database_) {
+ EXPECT_CALL(*mock_decoder_database_, Die()).Times(1);
+ }
+ if (use_mock_delay_manager_) {
+ EXPECT_CALL(*mock_delay_manager_, Die()).Times(1);
+ }
+ if (use_mock_delay_peak_detector_) {
+ EXPECT_CALL(*mock_delay_peak_detector_, Die()).Times(1);
+ }
+ if (use_mock_dtmf_buffer_) {
+ EXPECT_CALL(*mock_dtmf_buffer_, Die()).Times(1);
+ }
+ if (use_mock_dtmf_tone_generator_) {
+ EXPECT_CALL(*mock_dtmf_tone_generator_, Die()).Times(1);
+ }
+ if (use_mock_packet_buffer_) {
+ EXPECT_CALL(*mock_packet_buffer_, Die()).Times(1);
+ }
+ delete neteq_;
+ }
+
+ NetEqImpl* neteq_;
+ NetEq::Config config_;
+ MockBufferLevelFilter* mock_buffer_level_filter_;
+ BufferLevelFilter* buffer_level_filter_;
+ bool use_mock_buffer_level_filter_;
+ MockDecoderDatabase* mock_decoder_database_;
+ DecoderDatabase* decoder_database_;
+ bool use_mock_decoder_database_;
+ MockDelayPeakDetector* mock_delay_peak_detector_;
+ DelayPeakDetector* delay_peak_detector_;
+ bool use_mock_delay_peak_detector_;
+ MockDelayManager* mock_delay_manager_;
+ DelayManager* delay_manager_;
+ bool use_mock_delay_manager_;
+ MockDtmfBuffer* mock_dtmf_buffer_;
+ DtmfBuffer* dtmf_buffer_;
+ bool use_mock_dtmf_buffer_;
+ MockDtmfToneGenerator* mock_dtmf_tone_generator_;
+ DtmfToneGenerator* dtmf_tone_generator_;
+ bool use_mock_dtmf_tone_generator_;
+ MockPacketBuffer* mock_packet_buffer_;
+ PacketBuffer* packet_buffer_;
+ bool use_mock_packet_buffer_;
+ MockPayloadSplitter* mock_payload_splitter_;
+ PayloadSplitter* payload_splitter_;
+ bool use_mock_payload_splitter_;
+ TimestampScaler* timestamp_scaler_;
+};
+
+
+// This tests the interface class NetEq.
+// TODO(hlundin): Move to separate file?
+TEST(NetEq, CreateAndDestroy) {
+ NetEq::Config config;
+ NetEq* neteq = NetEq::Create(config);
+ delete neteq;
+}
+
+TEST_F(NetEqImplTest, RegisterPayloadType) {
+ CreateInstance();
+ uint8_t rtp_payload_type = 0;
+ NetEqDecoder codec_type = kDecoderPCMu;
+ EXPECT_CALL(*mock_decoder_database_,
+ RegisterPayload(rtp_payload_type, codec_type));
+ neteq_->RegisterPayloadType(codec_type, rtp_payload_type);
+}
+
+TEST_F(NetEqImplTest, RemovePayloadType) {
+ CreateInstance();
+ uint8_t rtp_payload_type = 0;
+ EXPECT_CALL(*mock_decoder_database_, Remove(rtp_payload_type))
+ .WillOnce(Return(DecoderDatabase::kDecoderNotFound));
+ // Check that kFail is returned when database returns kDecoderNotFound.
+ EXPECT_EQ(NetEq::kFail, neteq_->RemovePayloadType(rtp_payload_type));
+}
+
+TEST_F(NetEqImplTest, InsertPacket) {
+ CreateInstance();
+ const int kPayloadLength = 100;
+ const uint8_t kPayloadType = 0;
+ const uint16_t kFirstSequenceNumber = 0x1234;
+ const uint32_t kFirstTimestamp = 0x12345678;
+ const uint32_t kSsrc = 0x87654321;
+ const uint32_t kFirstReceiveTime = 17;
+ uint8_t payload[kPayloadLength] = {0};
+ WebRtcRTPHeader rtp_header;
+ rtp_header.header.payloadType = kPayloadType;
+ rtp_header.header.sequenceNumber = kFirstSequenceNumber;
+ rtp_header.header.timestamp = kFirstTimestamp;
+ rtp_header.header.ssrc = kSsrc;
+
+ // Create a mock decoder object.
+ MockAudioDecoder mock_decoder;
+ // BWE update function called with first packet.
+ EXPECT_CALL(mock_decoder, IncomingPacket(_,
+ kPayloadLength,
+ kFirstSequenceNumber,
+ kFirstTimestamp,
+ kFirstReceiveTime));
+ // BWE update function called with second packet.
+ EXPECT_CALL(mock_decoder, IncomingPacket(_,
+ kPayloadLength,
+ kFirstSequenceNumber + 1,
+ kFirstTimestamp + 160,
+ kFirstReceiveTime + 155));
+ EXPECT_CALL(mock_decoder, Die()).Times(1); // Called when deleted.
+
+ // Expectations for decoder database.
+ EXPECT_CALL(*mock_decoder_database_, IsRed(kPayloadType))
+ .WillRepeatedly(Return(false)); // This is not RED.
+ EXPECT_CALL(*mock_decoder_database_, CheckPayloadTypes(_))
+ .Times(2)
+ .WillRepeatedly(Return(DecoderDatabase::kOK)); // Payload type is valid.
+ EXPECT_CALL(*mock_decoder_database_, IsDtmf(kPayloadType))
+ .WillRepeatedly(Return(false)); // This is not DTMF.
+ EXPECT_CALL(*mock_decoder_database_, GetDecoder(kPayloadType))
+ .Times(3)
+ .WillRepeatedly(Return(&mock_decoder));
+ EXPECT_CALL(*mock_decoder_database_, IsComfortNoise(kPayloadType))
+ .WillRepeatedly(Return(false)); // This is not CNG.
+ DecoderDatabase::DecoderInfo info;
+ info.codec_type = kDecoderPCMu;
+ EXPECT_CALL(*mock_decoder_database_, GetDecoderInfo(kPayloadType))
+ .WillRepeatedly(Return(&info));
+
+ // Expectations for packet buffer.
+ EXPECT_CALL(*mock_packet_buffer_, NumPacketsInBuffer())
+ .WillOnce(Return(0)) // First packet.
+ .WillOnce(Return(1)) // Second packet.
+ .WillOnce(Return(2)); // Second packet, checking after it was inserted.
+ EXPECT_CALL(*mock_packet_buffer_, Empty())
+ .WillOnce(Return(false)); // Called once after first packet is inserted.
+ EXPECT_CALL(*mock_packet_buffer_, Flush())
+ .Times(1);
+ EXPECT_CALL(*mock_packet_buffer_, InsertPacketList(_, _, _, _))
+ .Times(2)
+ .WillRepeatedly(DoAll(SetArgPointee<2>(kPayloadType),
+ WithArg<0>(Invoke(DeletePacketsAndReturnOk))));
+ // SetArgPointee<2>(kPayloadType) means that the third argument (zero-based
+ // index) is a pointer, and the variable pointed to is set to kPayloadType.
+ // Also invoke the function DeletePacketsAndReturnOk to properly delete all
+ // packets in the list (to avoid memory leaks in the test).
+ EXPECT_CALL(*mock_packet_buffer_, NextRtpHeader())
+ .Times(1)
+ .WillOnce(Return(&rtp_header.header));
+
+ // Expectations for DTMF buffer.
+ EXPECT_CALL(*mock_dtmf_buffer_, Flush())
+ .Times(1);
+
+ // Expectations for delay manager.
+ {
+ // All expectations within this block must be called in this specific order.
+ InSequence sequence; // Dummy variable.
+ // Expectations when the first packet is inserted.
+ EXPECT_CALL(*mock_delay_manager_, LastDecoderType(kDecoderPCMu))
+ .Times(1);
+ EXPECT_CALL(*mock_delay_manager_, last_pack_cng_or_dtmf())
+ .Times(2)
+ .WillRepeatedly(Return(-1));
+ EXPECT_CALL(*mock_delay_manager_, set_last_pack_cng_or_dtmf(0))
+ .Times(1);
+ EXPECT_CALL(*mock_delay_manager_, ResetPacketIatCount()).Times(1);
+ // Expectations when the second packet is inserted. Slightly different.
+ EXPECT_CALL(*mock_delay_manager_, LastDecoderType(kDecoderPCMu))
+ .Times(1);
+ EXPECT_CALL(*mock_delay_manager_, last_pack_cng_or_dtmf())
+ .WillOnce(Return(0));
+ EXPECT_CALL(*mock_delay_manager_, SetPacketAudioLength(30))
+ .WillOnce(Return(0));
+ }
+
+ // Expectations for payload splitter.
+ EXPECT_CALL(*mock_payload_splitter_, SplitAudio(_, _))
+ .Times(2)
+ .WillRepeatedly(Return(PayloadSplitter::kOK));
+
+ // Insert first packet.
+ neteq_->InsertPacket(rtp_header, payload, kPayloadLength, kFirstReceiveTime);
+
+ // Insert second packet.
+ rtp_header.header.timestamp += 160;
+ rtp_header.header.sequenceNumber += 1;
+ neteq_->InsertPacket(rtp_header, payload, kPayloadLength,
+ kFirstReceiveTime + 155);
+}
+
+TEST_F(NetEqImplTest, InsertPacketsUntilBufferIsFull) {
+ UseNoMocks();
+ CreateInstance();
+
+ const int kPayloadLengthSamples = 80;
+ const size_t kPayloadLengthBytes = 2 * kPayloadLengthSamples; // PCM 16-bit.
+ const uint8_t kPayloadType = 17; // Just an arbitrary number.
+ const uint32_t kReceiveTime = 17; // Value doesn't matter for this test.
+ uint8_t payload[kPayloadLengthBytes] = {0};
+ WebRtcRTPHeader rtp_header;
+ rtp_header.header.payloadType = kPayloadType;
+ rtp_header.header.sequenceNumber = 0x1234;
+ rtp_header.header.timestamp = 0x12345678;
+ rtp_header.header.ssrc = 0x87654321;
+
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->RegisterPayloadType(kDecoderPCM16B, kPayloadType));
+
+ // Insert packets. The buffer should not flush.
+ for (int i = 1; i <= config_.max_packets_in_buffer; ++i) {
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->InsertPacket(
+ rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ rtp_header.header.timestamp += kPayloadLengthSamples;
+ rtp_header.header.sequenceNumber += 1;
+ EXPECT_EQ(i, packet_buffer_->NumPacketsInBuffer());
+ }
+
+ // Insert one more packet and make sure the buffer got flushed. That is, it
+ // should only hold one single packet.
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->InsertPacket(
+ rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+ EXPECT_EQ(1, packet_buffer_->NumPacketsInBuffer());
+ const RTPHeader* test_header = packet_buffer_->NextRtpHeader();
+ EXPECT_EQ(rtp_header.header.timestamp, test_header->timestamp);
+ EXPECT_EQ(rtp_header.header.sequenceNumber, test_header->sequenceNumber);
+}
+
+// This test verifies that timestamps propagate from the incoming packets
+// through to the sync buffer and to the playout timestamp.
+TEST_F(NetEqImplTest, VerifyTimestampPropagation) {
+ UseNoMocks();
+ CreateInstance();
+
+ const uint8_t kPayloadType = 17; // Just an arbitrary number.
+ const uint32_t kReceiveTime = 17; // Value doesn't matter for this test.
+ const int kSampleRateHz = 8000;
+ const int kPayloadLengthSamples = 10 * kSampleRateHz / 1000; // 10 ms.
+ const size_t kPayloadLengthBytes = kPayloadLengthSamples;
+ uint8_t payload[kPayloadLengthBytes] = {0};
+ WebRtcRTPHeader rtp_header;
+ rtp_header.header.payloadType = kPayloadType;
+ rtp_header.header.sequenceNumber = 0x1234;
+ rtp_header.header.timestamp = 0x12345678;
+ rtp_header.header.ssrc = 0x87654321;
+
+ // This is a dummy decoder that produces as many output samples as the input
+ // has bytes. The output is an increasing series, starting at 1 for the first
+ // sample, and then increasing by 1 for each sample.
+ class CountingSamplesDecoder : public AudioDecoder {
+ public:
+ explicit CountingSamplesDecoder(enum NetEqDecoder type)
+ : AudioDecoder(type), next_value_(1) {}
+
+ // Produce as many samples as input bytes (|encoded_len|).
+ virtual int Decode(const uint8_t* encoded,
+ size_t encoded_len,
+ int16_t* decoded,
+ SpeechType* speech_type) {
+ for (size_t i = 0; i < encoded_len; ++i) {
+ decoded[i] = next_value_++;
+ }
+ *speech_type = kSpeech;
+ return encoded_len;
+ }
+
+ virtual int Init() {
+ next_value_ = 1;
+ return 0;
+ }
+
+ uint16_t next_value() const { return next_value_; }
+
+ private:
+ int16_t next_value_;
+ } decoder_(kDecoderPCM16B);
+
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->RegisterExternalDecoder(
+ &decoder_, kDecoderPCM16B, kPayloadType));
+
+ // Insert one packet.
+ EXPECT_EQ(NetEq::kOK,
+ neteq_->InsertPacket(
+ rtp_header, payload, kPayloadLengthBytes, kReceiveTime));
+
+ // Pull audio once.
+ const int kMaxOutputSize = 10 * kSampleRateHz / 1000;
+ int16_t output[kMaxOutputSize];
+ int samples_per_channel;
+ int num_channels;
+ NetEqOutputType type;
+ EXPECT_EQ(
+ NetEq::kOK,
+ neteq_->GetAudio(
+ kMaxOutputSize, output, &samples_per_channel, &num_channels, &type));
+ ASSERT_EQ(kMaxOutputSize, samples_per_channel);
+ EXPECT_EQ(1, num_channels);
+ EXPECT_EQ(kOutputNormal, type);
+
+ // Start with a simple check that the fake decoder is behaving as expected.
+ EXPECT_EQ(kPayloadLengthSamples, decoder_.next_value() - 1);
+
+ // The value of the last of the output samples is the same as the number of
+ // samples played from the decoded packet. Thus, this number + the RTP
+ // timestamp should match the playout timestamp.
+ uint32_t timestamp = 0;
+ EXPECT_TRUE(neteq_->GetPlayoutTimestamp(&timestamp));
+ EXPECT_EQ(rtp_header.header.timestamp + output[samples_per_channel - 1],
+ timestamp);
+
+ // Check the timestamp for the last value in the sync buffer. This should
+ // be one full frame length ahead of the RTP timestamp.
+ const SyncBuffer* sync_buffer = neteq_->sync_buffer_for_test();
+ ASSERT_TRUE(sync_buffer != NULL);
+ EXPECT_EQ(rtp_header.header.timestamp + kPayloadLengthSamples,
+ sync_buffer->end_timestamp());
+
+ // Check that the number of samples still to play from the sync buffer add
+ // up with what was already played out.
+ EXPECT_EQ(kPayloadLengthSamples - output[samples_per_channel - 1],
+ static_cast<int>(sync_buffer->FutureLength()));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_statistics.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_statistics.h
deleted file mode 100644
index bba5b06b964..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_statistics.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Definitions of statistics data structures for MCU and DSP sides.
- */
-
-#include "typedefs.h"
-
-#ifndef NETEQ_STATISTICS_H
-#define NETEQ_STATISTICS_H
-
-/*
- * Statistics struct on DSP side
- */
-typedef struct
-{
-
- /* variables for in-call statistics; queried through WebRtcNetEQ_GetNetworkStatistics */
- uint32_t expandLength; /* number of samples produced through expand */
- uint32_t preemptiveLength; /* number of samples produced through pre-emptive
- expand */
- uint32_t accelerateLength; /* number of samples removed through accelerate */
- int addedSamples; /* number of samples inserted in off mode */
-
- /* variables for post-call statistics; queried through WebRtcNetEQ_GetJitterStatistics */
- uint32_t expandedVoiceSamples; /* number of voice samples produced through expand */
- uint32_t expandedNoiseSamples; /* number of noise (background) samples produced
- through expand */
-
-} DSPStats_t;
-
-typedef struct {
- int preemptive_expand_bgn_samples;
- int preemptive_expand_normal_samples;
-
- int expand_bgn_samples;
- int expand_normal_samples;
-
- int merge_expand_bgn_samples;
- int merge_expand_normal_samples;
-
- int accelerate_bgn_samples;
- int accelarate_normal_samples;
-} ActivityStats;
-
-
-#endif
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_stereo_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
index d6c4150ec62..3c695c81d04 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_stereo_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_stereo_unittest.cc
@@ -10,14 +10,15 @@
// Test to verify correct stereo and multi-channel operation.
+#include <algorithm>
#include <string>
#include <list>
#include "gtest/gtest.h"
#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/gtest_disable.h"
@@ -53,14 +54,16 @@ class NetEqStereoTest : public ::testing::TestWithParam<TestParameters> {
frame_size_ms_(GetParam().frame_size),
frame_size_samples_(frame_size_ms_ * samples_per_ms_),
output_size_samples_(10 * samples_per_ms_),
- neteq_mono_(NetEq::Create(sample_rate_hz_)),
- neteq_(NetEq::Create(sample_rate_hz_)),
rtp_generator_mono_(samples_per_ms_),
rtp_generator_(samples_per_ms_),
payload_size_bytes_(0),
multi_payload_size_bytes_(0),
last_send_time_(0),
last_arrival_time_(0) {
+ NetEq::Config config;
+ config.sample_rate_hz = sample_rate_hz_;
+ neteq_mono_ = NetEq::Create(config);
+ neteq_ = NetEq::Create(config);
input_ = new int16_t[frame_size_samples_];
encoded_ = new uint8_t[2 * frame_size_samples_];
input_multi_channel_ = new int16_t[frame_size_samples_ * num_channels_];
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_tests.gypi b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
index a2b9265613f..4d2ce252bdf 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_tests.gypi
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_tests.gypi
@@ -12,8 +12,10 @@
'target_name': 'neteq_rtpplay',
'type': 'executable',
'dependencies': [
- 'NetEq4',
- 'NetEq4TestTools',
+ 'neteq',
+ 'neteq_test_tools',
+ 'neteq_unittest_tools',
+ 'PCM16B',
'<(webrtc_root)/test/test.gyp:test_support_main',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
],
@@ -29,7 +31,7 @@
'type': 'executable',
'dependencies': [
# TODO(hlundin): Make RTPencode use ACM to encode files.
- 'NetEq4TestTools',# Test helpers
+ 'neteq_test_tools',# Test helpers
'G711',
'G722',
'PCM16B',
@@ -47,6 +49,7 @@
'CODEC_PCM16B_WB',
'CODEC_ISAC_SWB',
'CODEC_PCM16B_32KHZ',
+ 'CODEC_PCM16B_48KHZ',
'CODEC_CNGCODEC8',
'CODEC_CNGCODEC16',
'CODEC_CNGCODEC32',
@@ -79,14 +82,16 @@
},
{
- 'target_name': 'RTPanalyze',
+ 'target_name': 'rtp_analyze',
'type': 'executable',
'dependencies': [
- 'NetEq4TestTools',
+ 'neteq_unittest_tools',
'<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
],
'sources': [
- 'test/RTPanalyze.cc',
+ 'tools/rtp_analyze.cc',
],
},
@@ -94,7 +99,7 @@
'target_name': 'RTPchange',
'type': 'executable',
'dependencies': [
- 'NetEq4TestTools',
+ 'neteq_test_tools',
'<(DEPTH)/testing/gtest.gyp:gtest',
],
'sources': [
@@ -106,7 +111,7 @@
'target_name': 'RTPtimeshift',
'type': 'executable',
'dependencies': [
- 'NetEq4TestTools',
+ 'neteq_test_tools',
'<(DEPTH)/testing/gtest.gyp:gtest',
],
'sources': [
@@ -118,7 +123,7 @@
'target_name': 'RTPcat',
'type': 'executable',
'dependencies': [
- 'NetEq4TestTools',
+ 'neteq_test_tools',
'<(DEPTH)/testing/gtest.gyp:gtest',
],
'sources': [
@@ -130,7 +135,7 @@
'target_name': 'rtp_to_text',
'type': 'executable',
'dependencies': [
- 'NetEq4TestTools',
+ 'neteq_test_tools',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
'sources': [
@@ -139,21 +144,66 @@
},
{
- 'target_name': 'neteq4_speed_test',
+ 'target_name': 'audio_classifier_test',
'type': 'executable',
'dependencies': [
- 'NetEq4',
- 'neteq_unittest_tools',
+ 'neteq',
+ ],
+ 'sources': [
+ 'test/audio_classifier_test.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'neteq_test_support',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'neteq',
'PCM16B',
+ 'neteq_unittest_tools',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
],
'sources': [
+ 'tools/neteq_performance_test.cc',
+ 'tools/neteq_performance_test.h',
+ 'tools/neteq_quality_test.cc',
+ 'tools/neteq_quality_test.h',
+ ],
+ }, # neteq_test_support
+
+ {
+ 'target_name': 'neteq_speed_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'neteq',
+ 'neteq_test_support',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ ],
+ 'sources': [
'test/neteq_speed_test.cc',
],
},
{
- 'target_name': 'NetEq4TestTools',
+ 'target_name': 'neteq_opus_fec_quality_test',
+ 'type': 'executable',
+ 'dependencies': [
+ 'neteq',
+ 'neteq_test_support',
+ 'webrtc_opus',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ '<(webrtc_root)/test/test.gyp:test_support_main',
+ ],
+ 'sources': [
+ 'test/neteq_opus_fec_quality_test.cc',
+ ],
+ },
+
+ {
+ 'target_name': 'neteq_test_tools',
# Collection of useful functions used in other tests.
'type': 'static_library',
'variables': {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
index 965f75f2cdd..0233e195003 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittest.cc
@@ -12,19 +12,20 @@
* This file includes unit tests for NetEQ.
*/
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include <math.h>
#include <stdlib.h>
#include <string.h> // memset
-#include <cmath>
+#include <algorithm>
#include <set>
#include <string>
#include <vector>
#include "gflags/gflags.h"
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h"
+#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/gtest_disable.h"
@@ -140,7 +141,7 @@ void RefFiles::ReadFromFileAndCompare(
NetEqNetworkStatistics ref_stats;
ASSERT_EQ(1u, fread(&ref_stats, stat_size, 1, input_fp_));
// Compare
- EXPECT_EQ(0, memcmp(&stats, &ref_stats, stat_size));
+ ASSERT_EQ(0, memcmp(&stats, &ref_stats, stat_size));
}
}
@@ -173,11 +174,11 @@ void RefFiles::ReadFromFileAndCompare(
ASSERT_EQ(1u, fread(&(ref_stats.jitter), sizeof(ref_stats.jitter), 1,
input_fp_));
// Compare
- EXPECT_EQ(ref_stats.fraction_lost, stats.fraction_lost);
- EXPECT_EQ(ref_stats.cumulative_lost, stats.cumulative_lost);
- EXPECT_EQ(ref_stats.extended_max_sequence_number,
+ ASSERT_EQ(ref_stats.fraction_lost, stats.fraction_lost);
+ ASSERT_EQ(ref_stats.cumulative_lost, stats.cumulative_lost);
+ ASSERT_EQ(ref_stats.extended_max_sequence_number,
stats.extended_max_sequence_number);
- EXPECT_EQ(ref_stats.jitter, stats.jitter);
+ ASSERT_EQ(ref_stats.jitter, stats.jitter);
}
}
@@ -219,11 +220,22 @@ class NetEqDecodingTest : public ::testing::Test {
const std::set<uint16_t>& drop_seq_numbers,
bool expect_seq_no_wrap, bool expect_timestamp_wrap);
+ void LongCngWithClockDrift(double drift_factor,
+ double network_freeze_ms,
+ bool pull_audio_during_freeze,
+ int delay_tolerance_ms,
+ int max_time_to_speech_ms);
+
+ void DuplicateCng();
+
+ uint32_t PlayoutTimestamp();
+
NetEq* neteq_;
FILE* rtp_fp_;
unsigned int sim_clock_;
int16_t out_data_[kMaxBlockSize];
int output_sample_rate_;
+ int algorithmic_delay_ms_;
};
// Allocating the static const so that it can be passed by reference.
@@ -238,12 +250,18 @@ NetEqDecodingTest::NetEqDecodingTest()
: neteq_(NULL),
rtp_fp_(NULL),
sim_clock_(0),
- output_sample_rate_(kInitSampleRateHz) {
+ output_sample_rate_(kInitSampleRateHz),
+ algorithmic_delay_ms_(0) {
memset(out_data_, 0, sizeof(out_data_));
}
void NetEqDecodingTest::SetUp() {
- neteq_ = NetEq::Create(kInitSampleRateHz);
+ NetEq::Config config;
+ config.sample_rate_hz = kInitSampleRateHz;
+ neteq_ = NetEq::Create(config);
+ NetEqNetworkStatistics stat;
+ ASSERT_EQ(0, neteq_->NetworkStatistics(&stat));
+ algorithmic_delay_ms_ = stat.current_buffer_size_ms;
ASSERT_TRUE(neteq_);
LoadDecoders();
}
@@ -365,19 +383,20 @@ void NetEqDecodingTest::DecodeAndCheckStats(const std::string &rtp_file,
ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
while (rtp.dataLen() >= 0) {
int out_len;
- Process(&rtp, &out_len);
+ ASSERT_NO_FATAL_FAILURE(Process(&rtp, &out_len));
// Query the network statistics API once per second
if (sim_clock_ % 1000 == 0) {
// Process NetworkStatistics.
NetEqNetworkStatistics network_stats;
ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
- network_stat_files.ProcessReference(network_stats);
+ ASSERT_NO_FATAL_FAILURE(
+ network_stat_files.ProcessReference(network_stats));
// Process RTCPstat.
RtcpStatistics rtcp_stats;
neteq_->GetRtcpStatistics(&rtcp_stats);
- rtcp_stat_files.ProcessReference(rtcp_stats);
+ ASSERT_NO_FATAL_FAILURE(rtcp_stat_files.ProcessReference(rtcp_stats));
}
}
}
@@ -475,8 +494,8 @@ void NetEqDecodingTest::CheckBgnOff(int sampling_rate_hz,
ASSERT_EQ(expected_samples_per_channel, samples_per_channel);
// To be able to test the fading of background noise we need at lease to pull
- // 610 frames.
- const int kFadingThreshold = 610;
+ // 611 frames.
+ const int kFadingThreshold = 611;
// Test several CNG-to-PLC packet for the expected behavior. The number 20 is
// arbitrary, but sufficiently large to test enough number of frames.
@@ -507,56 +526,46 @@ void NetEqDecodingTest::CheckBgnOff(int sampling_rate_hz,
EXPECT_TRUE(plc_to_cng); // Just to be sure that PLC-to-CNG has occurred.
}
-#if defined(_WIN32) && defined(WEBRTC_ARCH_64_BITS)
-// Disabled for Windows 64-bit until webrtc:1458 is fixed.
-#define MAYBE_TestBitExactness DISABLED_TestBitExactness
-#else
-#define MAYBE_TestBitExactness TestBitExactness
-#endif
-
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(MAYBE_TestBitExactness)) {
- const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
+TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(TestBitExactness)) {
+ const std::string input_rtp_file = webrtc::test::ProjectRootPath() +
"resources/audio_coding/neteq_universal_new.rtp";
-#if defined(_MSC_VER) && (_MSC_VER >= 1700)
- // For Visual Studio 2012 and later, we will have to use the generic reference
- // file, rather than the windows-specific one.
- const std::string kInputRefFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq4_universal_ref.pcm";
-#else
- const std::string kInputRefFile =
+ // Note that neteq4_universal_ref.pcm and neteq4_universal_ref_win_32.pcm
+ // are identical. The latter could have been removed, but if clients still
+ // have a copy of the file, the test will fail.
+ const std::string input_ref_file =
webrtc::test::ResourcePath("audio_coding/neteq4_universal_ref", "pcm");
-#endif
if (FLAGS_gen_ref) {
- DecodeAndCompare(kInputRtpFile, "");
+ DecodeAndCompare(input_rtp_file, "");
} else {
- DecodeAndCompare(kInputRtpFile, kInputRefFile);
+ DecodeAndCompare(input_rtp_file, input_ref_file);
}
}
TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(TestNetworkStatistics)) {
- const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
+ const std::string input_rtp_file = webrtc::test::ProjectRootPath() +
"resources/audio_coding/neteq_universal_new.rtp";
#if defined(_MSC_VER) && (_MSC_VER >= 1700)
// For Visual Studio 2012 and later, we will have to use the generic reference
// file, rather than the windows-specific one.
- const std::string kNetworkStatRefFile = webrtc::test::ProjectRootPath() +
+ const std::string network_stat_ref_file = webrtc::test::ProjectRootPath() +
"resources/audio_coding/neteq4_network_stats.dat";
#else
- const std::string kNetworkStatRefFile =
+ const std::string network_stat_ref_file =
webrtc::test::ResourcePath("audio_coding/neteq4_network_stats", "dat");
#endif
- const std::string kRtcpStatRefFile =
+ const std::string rtcp_stat_ref_file =
webrtc::test::ResourcePath("audio_coding/neteq4_rtcp_stats", "dat");
if (FLAGS_gen_ref) {
- DecodeAndCheckStats(kInputRtpFile, "", "");
+ DecodeAndCheckStats(input_rtp_file, "", "");
} else {
- DecodeAndCheckStats(kInputRtpFile, kNetworkStatRefFile, kRtcpStatRefFile);
+ DecodeAndCheckStats(input_rtp_file, network_stat_ref_file,
+ rtcp_stat_ref_file);
}
}
// TODO(hlundin): Re-enable test once the statistics interface is up and again.
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(TestFrameWaitingTimeStatistics)) {
+TEST_F(NetEqDecodingTest, TestFrameWaitingTimeStatistics) {
// Use fax mode to avoid time-scaling. This is to simplify the testing of
// packet waiting times in the packet buffer.
neteq_->SetPlayoutMode(kPlayoutFax);
@@ -630,8 +639,7 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(TestFrameWaitingTimeStatistics)) {
EXPECT_EQ(100u, waiting_times.size());
}
-TEST_F(NetEqDecodingTest,
- DISABLED_ON_ANDROID(TestAverageInterArrivalTimeNegative)) {
+TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) {
const int kNumFrames = 3000; // Needed for convergence.
int frame_index = 0;
const int kSamples = 10 * 16;
@@ -662,8 +670,7 @@ TEST_F(NetEqDecodingTest,
EXPECT_EQ(-103196, network_stats.clockdrift_ppm);
}
-TEST_F(NetEqDecodingTest,
- DISABLED_ON_ANDROID(TestAverageInterArrivalTimePositive)) {
+TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) {
const int kNumFrames = 5000; // Needed for convergence.
int frame_index = 0;
const int kSamples = 10 * 16;
@@ -694,16 +701,20 @@ TEST_F(NetEqDecodingTest,
EXPECT_EQ(110946, network_stats.clockdrift_ppm);
}
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) {
+void NetEqDecodingTest::LongCngWithClockDrift(double drift_factor,
+ double network_freeze_ms,
+ bool pull_audio_during_freeze,
+ int delay_tolerance_ms,
+ int max_time_to_speech_ms) {
uint16_t seq_no = 0;
uint32_t timestamp = 0;
const int kFrameSizeMs = 30;
const int kSamples = kFrameSizeMs * 16;
const int kPayloadBytes = kSamples * 2;
- // Apply a clock drift of -25 ms / s (sender faster than receiver).
- const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
double next_input_time_ms = 0.0;
double t_ms;
+ int out_len;
+ int num_channels;
NetEqOutputType type;
// Insert speech for 5 seconds.
@@ -718,18 +729,16 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
++seq_no;
timestamp += kSamples;
- next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
+ next_input_time_ms += static_cast<double>(kFrameSizeMs) * drift_factor;
}
// Pull out data once.
- int out_len;
- int num_channels;
ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
&num_channels, &type));
ASSERT_EQ(kBlockSize16kHz, out_len);
}
EXPECT_EQ(kOutputNormal, type);
- int32_t delay_before = timestamp - neteq_->PlayoutTimestamp();
+ int32_t delay_before = timestamp - PlayoutTimestamp();
// Insert CNG for 1 minute (= 60000 ms).
const int kCngPeriodMs = 100;
@@ -746,11 +755,9 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
++seq_no;
timestamp += kCngPeriodSamples;
- next_input_time_ms += static_cast<double>(kCngPeriodMs) * kDriftFactor;
+ next_input_time_ms += static_cast<double>(kCngPeriodMs) * drift_factor;
}
// Pull out data once.
- int out_len;
- int num_channels;
ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
&num_channels, &type));
ASSERT_EQ(kBlockSize16kHz, out_len);
@@ -758,7 +765,49 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) {
EXPECT_EQ(kOutputCNG, type);
+ if (network_freeze_ms > 0) {
+ // First keep pulling audio for |network_freeze_ms| without inserting
+ // any data, then insert CNG data corresponding to |network_freeze_ms|
+ // without pulling any output audio.
+ const double loop_end_time = t_ms + network_freeze_ms;
+ for (; t_ms < loop_end_time; t_ms += 10) {
+ // Pull out data once.
+ ASSERT_EQ(0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ EXPECT_EQ(kOutputCNG, type);
+ }
+ bool pull_once = pull_audio_during_freeze;
+ // If |pull_once| is true, GetAudio will be called once half-way through
+ // the network recovery period.
+ double pull_time_ms = (t_ms + next_input_time_ms) / 2;
+ while (next_input_time_ms <= t_ms) {
+ if (pull_once && next_input_time_ms >= pull_time_ms) {
+ pull_once = false;
+ // Pull out data once.
+ ASSERT_EQ(
+ 0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ EXPECT_EQ(kOutputCNG, type);
+ t_ms += 10;
+ }
+ // Insert one CNG frame each 100 ms.
+ uint8_t payload[kPayloadBytes];
+ int payload_len;
+ WebRtcRTPHeader rtp_info;
+ PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+ ++seq_no;
+ timestamp += kCngPeriodSamples;
+ next_input_time_ms += kCngPeriodMs * drift_factor;
+ }
+ }
+
// Insert speech again until output type is speech.
+ double speech_restart_time_ms = t_ms;
while (type != kOutputNormal) {
// Each turn in this for loop is 10 ms.
while (next_input_time_ms <= t_ms) {
@@ -769,11 +818,9 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
++seq_no;
timestamp += kSamples;
- next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
+ next_input_time_ms += kFrameSizeMs * drift_factor;
}
// Pull out data once.
- int out_len;
- int num_channels;
ASSERT_EQ(0, neteq_->GetAudio(kMaxBlockSize, out_data_, &out_len,
&num_channels, &type));
ASSERT_EQ(kBlockSize16kHz, out_len);
@@ -781,13 +828,99 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(LongCngWithClockDrift)) {
t_ms += 10;
}
- int32_t delay_after = timestamp - neteq_->PlayoutTimestamp();
+ // Check that the speech starts again within reasonable time.
+ double time_until_speech_returns_ms = t_ms - speech_restart_time_ms;
+ EXPECT_LT(time_until_speech_returns_ms, max_time_to_speech_ms);
+ int32_t delay_after = timestamp - PlayoutTimestamp();
// Compare delay before and after, and make sure it differs less than 20 ms.
- EXPECT_LE(delay_after, delay_before + 20 * 16);
- EXPECT_GE(delay_after, delay_before - 20 * 16);
+ EXPECT_LE(delay_after, delay_before + delay_tolerance_ms * 16);
+ EXPECT_GE(delay_after, delay_before - delay_tolerance_ms * 16);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithNegativeClockDrift) {
+ // Apply a clock drift of -25 ms / s (sender faster than receiver).
+ const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
+ const double kNetworkFreezeTimeMs = 0.0;
+ const bool kGetAudioDuringFreezeRecovery = false;
+ const int kDelayToleranceMs = 20;
+ const int kMaxTimeToSpeechMs = 100;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithPositiveClockDrift) {
+ // Apply a clock drift of +25 ms / s (sender slower than receiver).
+ const double kDriftFactor = 1000.0 / (1000.0 - 25.0);
+ const double kNetworkFreezeTimeMs = 0.0;
+ const bool kGetAudioDuringFreezeRecovery = false;
+ const int kDelayToleranceMs = 20;
+ const int kMaxTimeToSpeechMs = 100;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithNegativeClockDriftNetworkFreeze) {
+ // Apply a clock drift of -25 ms / s (sender faster than receiver).
+ const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
+ const double kNetworkFreezeTimeMs = 5000.0;
+ const bool kGetAudioDuringFreezeRecovery = false;
+ const int kDelayToleranceMs = 50;
+ const int kMaxTimeToSpeechMs = 200;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
}
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(UnknownPayloadType)) {
+TEST_F(NetEqDecodingTest, LongCngWithPositiveClockDriftNetworkFreeze) {
+ // Apply a clock drift of +25 ms / s (sender slower than receiver).
+ const double kDriftFactor = 1000.0 / (1000.0 - 25.0);
+ const double kNetworkFreezeTimeMs = 5000.0;
+ const bool kGetAudioDuringFreezeRecovery = false;
+ const int kDelayToleranceMs = 20;
+ const int kMaxTimeToSpeechMs = 100;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithPositiveClockDriftNetworkFreezeExtraPull) {
+ // Apply a clock drift of +25 ms / s (sender slower than receiver).
+ const double kDriftFactor = 1000.0 / (1000.0 - 25.0);
+ const double kNetworkFreezeTimeMs = 5000.0;
+ const bool kGetAudioDuringFreezeRecovery = true;
+ const int kDelayToleranceMs = 20;
+ const int kMaxTimeToSpeechMs = 100;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithoutClockDrift) {
+ const double kDriftFactor = 1.0; // No drift.
+ const double kNetworkFreezeTimeMs = 0.0;
+ const bool kGetAudioDuringFreezeRecovery = false;
+ const int kDelayToleranceMs = 10;
+ const int kMaxTimeToSpeechMs = 50;
+ LongCngWithClockDrift(kDriftFactor,
+ kNetworkFreezeTimeMs,
+ kGetAudioDuringFreezeRecovery,
+ kDelayToleranceMs,
+ kMaxTimeToSpeechMs);
+}
+
+TEST_F(NetEqDecodingTest, UnknownPayloadType) {
const int kPayloadBytes = 100;
uint8_t payload[kPayloadBytes] = {0};
WebRtcRTPHeader rtp_info;
@@ -798,18 +931,6 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(UnknownPayloadType)) {
EXPECT_EQ(NetEq::kUnknownRtpPayloadType, neteq_->LastError());
}
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(OversizePacket)) {
- // Payload size is greater than packet buffer size
- const int kPayloadBytes = NetEq::kMaxBytesInBuffer + 1;
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcRTPHeader rtp_info;
- PopulateRtpInfo(0, 0, &rtp_info);
- rtp_info.header.payloadType = 103; // iSAC, no packet splitting.
- EXPECT_EQ(NetEq::kFail,
- neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
- EXPECT_EQ(NetEq::kOversizePacket, neteq_->LastError());
-}
-
TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(DecoderError)) {
const int kPayloadBytes = 100;
uint8_t payload[kPayloadBytes] = {0};
@@ -849,7 +970,7 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(DecoderError)) {
}
}
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(GetAudioBeforeInsertPacket)) {
+TEST_F(NetEqDecodingTest, GetAudioBeforeInsertPacket) {
NetEqOutputType type;
// Set all of |out_data_| to 1, and verify that it was set to 0 by the call
// to GetAudio.
@@ -872,7 +993,7 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(GetAudioBeforeInsertPacket)) {
}
}
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(BackgroundNoise)) {
+TEST_F(NetEqDecodingTest, BackgroundNoise) {
neteq_->SetBackgroundNoiseMode(kBgnOn);
CheckBgnOff(8000, kBgnOn);
CheckBgnOff(16000, kBgnOn);
@@ -892,7 +1013,7 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(BackgroundNoise)) {
EXPECT_EQ(kBgnFade, neteq_->BackgroundNoiseMode());
}
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(SyncPacketInsert)) {
+TEST_F(NetEqDecodingTest, SyncPacketInsert) {
WebRtcRTPHeader rtp_info;
uint32_t receive_timestamp = 0;
// For the readability use the following payloads instead of the defaults of
@@ -971,12 +1092,16 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(SyncPacketInsert)) {
// First insert several noise like packets, then sync-packets. Decoding all
// packets should not produce error, statistics should not show any packet loss
// and sync-packets should decode to zero.
-TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(SyncPacketDecode)) {
+// TODO(turajs) we will have a better test if we have a referece NetEq, and
+// when Sync packets are inserted in "test" NetEq we insert all-zero payload
+// in reference NetEq and compare the output of those two.
+TEST_F(NetEqDecodingTest, SyncPacketDecode) {
WebRtcRTPHeader rtp_info;
PopulateRtpInfo(0, 0, &rtp_info);
const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
uint8_t payload[kPayloadBytes];
int16_t decoded[kBlockSize16kHz];
+ int algorithmic_frame_delay = algorithmic_delay_ms_ / 10 + 1;
for (int n = 0; n < kPayloadBytes; ++n) {
payload[n] = (rand() & 0xF0) + 1; // Non-zero random sequence.
}
@@ -986,7 +1111,6 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(SyncPacketDecode)) {
int num_channels;
int samples_per_channel;
uint32_t receive_timestamp = 0;
- int delay_samples = 0;
for (int n = 0; n < 100; ++n) {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
receive_timestamp));
@@ -996,16 +1120,15 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(SyncPacketDecode)) {
ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
ASSERT_EQ(1, num_channels);
- // Even if there is RTP packet in NetEq's buffer, the first frame pulled
- // from NetEq starts with few zero samples. Here we measure this delay.
- if (n == 0) {
- while(decoded[delay_samples] == 0) delay_samples++;
- }
rtp_info.header.sequenceNumber++;
rtp_info.header.timestamp += kBlockSize16kHz;
receive_timestamp += kBlockSize16kHz;
}
const int kNumSyncPackets = 10;
+
+ // Make sure sufficient number of sync packets are inserted that we can
+ // conduct a test.
+ ASSERT_GT(kNumSyncPackets, algorithmic_frame_delay);
// Insert sync-packets, the decoded sequence should be all-zero.
for (int n = 0; n < kNumSyncPackets; ++n) {
ASSERT_EQ(0, neteq_->InsertSyncPacket(rtp_info, receive_timestamp));
@@ -1014,38 +1137,44 @@ TEST_F(NetEqDecodingTest, DISABLED_ON_ANDROID(SyncPacketDecode)) {
&output_type));
ASSERT_EQ(kBlockSize16kHz, samples_per_channel);
ASSERT_EQ(1, num_channels);
- EXPECT_TRUE(IsAllZero(&decoded[delay_samples],
- samples_per_channel * num_channels - delay_samples));
- delay_samples = 0; // Delay only matters in the first frame.
+ if (n > algorithmic_frame_delay) {
+ EXPECT_TRUE(IsAllZero(decoded, samples_per_channel * num_channels));
+ }
rtp_info.header.sequenceNumber++;
rtp_info.header.timestamp += kBlockSize16kHz;
receive_timestamp += kBlockSize16kHz;
}
- // We insert a regular packet, if sync packet are not correctly buffered then
+
+ // We insert regular packets, if sync packet are not correctly buffered then
// network statistics would show some packet loss.
- ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
- receive_timestamp));
- ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
- &samples_per_channel, &num_channels,
- &output_type));
- // Make sure the last inserted packet is decoded and there are non-zero
- // samples.
- EXPECT_FALSE(IsAllZero(decoded, samples_per_channel * num_channels));
+ for (int n = 0; n <= algorithmic_frame_delay + 10; ++n) {
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
+ receive_timestamp));
+ ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
+ &samples_per_channel, &num_channels,
+ &output_type));
+ if (n >= algorithmic_frame_delay + 1) {
+ // Expect that this frame contain samples from regular RTP.
+ EXPECT_TRUE(IsAllNonZero(decoded, samples_per_channel * num_channels));
+ }
+ rtp_info.header.sequenceNumber++;
+ rtp_info.header.timestamp += kBlockSize16kHz;
+ receive_timestamp += kBlockSize16kHz;
+ }
NetEqNetworkStatistics network_stats;
ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
// Expecting a "clean" network.
EXPECT_EQ(0, network_stats.packet_loss_rate);
EXPECT_EQ(0, network_stats.expand_rate);
EXPECT_EQ(0, network_stats.accelerate_rate);
- EXPECT_EQ(0, network_stats.preemptive_rate);
+ EXPECT_LE(network_stats.preemptive_rate, 150);
}
// Test if the size of the packet buffer reported correctly when containing
// sync packets. Also, test if network packets override sync packets. That is to
// prefer decoding a network packet to a sync packet, if both have same sequence
// number and timestamp.
-TEST_F(NetEqDecodingTest,
- DISABLED_ON_ANDROID(SyncPacketBufferSizeAndOverridenByNetworkPackets)) {
+TEST_F(NetEqDecodingTest, SyncPacketBufferSizeAndOverridenByNetworkPackets) {
WebRtcRTPHeader rtp_info;
PopulateRtpInfo(0, 0, &rtp_info);
const int kPayloadBytes = kBlockSize16kHz * sizeof(int16_t);
@@ -1060,7 +1189,8 @@ TEST_F(NetEqDecodingTest,
int num_channels;
int samples_per_channel;
uint32_t receive_timestamp = 0;
- for (int n = 0; n < 1; ++n) {
+ int algorithmic_frame_delay = algorithmic_delay_ms_ / 10 + 1;
+ for (int n = 0; n < algorithmic_frame_delay; ++n) {
ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes,
receive_timestamp));
ASSERT_EQ(0, neteq_->GetAudio(kBlockSize16kHz, decoded,
@@ -1086,7 +1216,8 @@ TEST_F(NetEqDecodingTest,
}
NetEqNetworkStatistics network_stats;
ASSERT_EQ(0, neteq_->NetworkStatistics(&network_stats));
- EXPECT_EQ(kNumSyncPackets * 10, network_stats.current_buffer_size_ms);
+ EXPECT_EQ(kNumSyncPackets * 10 + algorithmic_delay_ms_,
+ network_stats.current_buffer_size_ms);
// Rewind |rtp_info| to that of the first sync packet.
memcpy(&rtp_info, &first_sync_packet_rtp_info, sizeof(rtp_info));
@@ -1129,7 +1260,7 @@ void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
NetEqOutputType output_type;
uint32_t receive_timestamp = 0;
- // Insert speech for 1 second.
+ // Insert speech for 2 seconds.
const int kSpeechDurationMs = 2000;
int packets_inserted = 0;
uint16_t last_seq_no;
@@ -1159,7 +1290,8 @@ void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
if (packets_inserted > 4) {
// Expect preferred and actual buffer size to be no more than 2 frames.
EXPECT_LE(network_stats.preferred_buffer_size_ms, kFrameSizeMs * 2);
- EXPECT_LE(network_stats.current_buffer_size_ms, kFrameSizeMs * 2);
+ EXPECT_LE(network_stats.current_buffer_size_ms, kFrameSizeMs * 2 +
+ algorithmic_delay_ms_);
}
last_seq_no = seq_no;
last_timestamp = timestamp;
@@ -1180,9 +1312,8 @@ void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
ASSERT_EQ(1, num_channels);
// Expect delay (in samples) to be less than 2 packets.
- EXPECT_LE(timestamp - neteq_->PlayoutTimestamp(),
+ EXPECT_LE(timestamp - PlayoutTimestamp(),
static_cast<uint32_t>(kSamples * 2));
-
}
// Make sure we have actually tested wrap-around.
ASSERT_EQ(expect_seq_no_wrap, seq_no_wrapped);
@@ -1216,4 +1347,91 @@ TEST_F(NetEqDecodingTest, TimestampAndSequenceNumberWrap) {
WrapTest(0xFFFF - 10, 0xFFFFFFFF - 5000, drop_seq_numbers, true, true);
}
-} // namespace
+void NetEqDecodingTest::DuplicateCng() {
+ uint16_t seq_no = 0;
+ uint32_t timestamp = 0;
+ const int kFrameSizeMs = 10;
+ const int kSampleRateKhz = 16;
+ const int kSamples = kFrameSizeMs * kSampleRateKhz;
+ const int kPayloadBytes = kSamples * 2;
+
+ const int algorithmic_delay_samples = std::max(
+ algorithmic_delay_ms_ * kSampleRateKhz, 5 * kSampleRateKhz / 8);
+ // Insert three speech packet. Three are needed to get the frame length
+ // correct.
+ int out_len;
+ int num_channels;
+ NetEqOutputType type;
+ uint8_t payload[kPayloadBytes] = {0};
+ WebRtcRTPHeader rtp_info;
+ for (int i = 0; i < 3; ++i) {
+ PopulateRtpInfo(seq_no, timestamp, &rtp_info);
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+ ++seq_no;
+ timestamp += kSamples;
+
+ // Pull audio once.
+ ASSERT_EQ(0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ }
+ // Verify speech output.
+ EXPECT_EQ(kOutputNormal, type);
+
+ // Insert same CNG packet twice.
+ const int kCngPeriodMs = 100;
+ const int kCngPeriodSamples = kCngPeriodMs * kSampleRateKhz;
+ int payload_len;
+ PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
+ // This is the first time this CNG packet is inserted.
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+
+ // Pull audio once and make sure CNG is played.
+ ASSERT_EQ(0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ EXPECT_EQ(kOutputCNG, type);
+ EXPECT_EQ(timestamp - algorithmic_delay_samples, PlayoutTimestamp());
+
+ // Insert the same CNG packet again. Note that at this point it is old, since
+ // we have already decoded the first copy of it.
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, payload_len, 0));
+
+ // Pull audio until we have played |kCngPeriodMs| of CNG. Start at 10 ms since
+ // we have already pulled out CNG once.
+ for (int cng_time_ms = 10; cng_time_ms < kCngPeriodMs; cng_time_ms += 10) {
+ ASSERT_EQ(0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ EXPECT_EQ(kOutputCNG, type);
+ EXPECT_EQ(timestamp - algorithmic_delay_samples,
+ PlayoutTimestamp());
+ }
+
+ // Insert speech again.
+ ++seq_no;
+ timestamp += kCngPeriodSamples;
+ PopulateRtpInfo(seq_no, timestamp, &rtp_info);
+ ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload, kPayloadBytes, 0));
+
+ // Pull audio once and verify that the output is speech again.
+ ASSERT_EQ(0,
+ neteq_->GetAudio(
+ kMaxBlockSize, out_data_, &out_len, &num_channels, &type));
+ ASSERT_EQ(kBlockSize16kHz, out_len);
+ EXPECT_EQ(kOutputNormal, type);
+ EXPECT_EQ(timestamp + kSamples - algorithmic_delay_samples,
+ PlayoutTimestamp());
+}
+
+uint32_t NetEqDecodingTest::PlayoutTimestamp() {
+ uint32_t playout_timestamp = 0;
+ EXPECT_TRUE(neteq_->GetPlayoutTimestamp(&playout_timestamp));
+ return playout_timestamp;
+}
+
+TEST_F(NetEqDecodingTest, DiscardDuplicateCng) { DuplicateCng(); }
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate b/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate
deleted file mode 100644
index e8f4e482aaf..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/neteq_unittests.isolate
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-{
- 'conditions': [
- ['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
- 'variables': {
- 'isolate_dependency_untracked': [
- '../../../../../data/',
- '../../../../../resources/',
- ],
- },
- }],
- ['OS=="linux" or OS=="mac" or OS=="win"', {
- 'variables': {
- 'command': [
- '../../../../testing/test_env.py',
- '<(PRODUCT_DIR)/neteq_unittests<(EXECUTABLE_SUFFIX)',
- ],
- 'isolate_dependency_touched': [
- '../../../../DEPS',
- ],
- 'isolate_dependency_tracked': [
- '../../../../resources/audio_coding/neteq_network_stats.dat',
- '../../../../resources/audio_coding/neteq_rtcp_stats.dat',
- '../../../../resources/audio_coding/neteq_universal.rtp',
- '../../../../resources/audio_coding/neteq_universal_ref.pcm',
- '../../../../resources/audio_coding/testfile32kHz.pcm',
- '../../../../testing/test_env.py',
- '<(PRODUCT_DIR)/neteq_unittests<(EXECUTABLE_SUFFIX)',
- ],
- 'isolate_dependency_untracked': [
- '../../../../tools/swarming_client/',
- ],
- },
- }],
- ],
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.c
deleted file mode 100644
index 8cbda521542..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.c
+++ /dev/null
@@ -1,279 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the function for handling "normal" speech operation.
- */
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_expanded 125*fs/8000 0 125*fs/8000-1
-
- func WebRtcNetEQ_Expand 40+370*fs/8000 125*fs/8000 39+495*fs/8000
-
- Total: 40+495*fs/8000
- */
-
-#define SCRATCH_PW16_EXPANDED 0
-#if (defined(NETEQ_48KHZ_WIDEBAND))
-#define SCRATCH_NETEQ_EXPAND 756
-#elif (defined(NETEQ_32KHZ_WIDEBAND))
-#define SCRATCH_NETEQ_EXPAND 504
-#elif (defined(NETEQ_WIDEBAND))
-#define SCRATCH_NETEQ_EXPAND 252
-#else /* NB */
-#define SCRATCH_NETEQ_EXPAND 126
-#endif
-
-/****************************************************************************
- * WebRtcNetEQ_Normal(...)
- *
- * This function has the possibility to modify data that is played out in Normal
- * mode, for example adjust the gain of the signal. The length of the signal
- * can not be changed.
- *
- * Input:
- * - inst : NetEq instance, i.e. the user that requests more
- * speech/audio data
- * - scratchPtr : Pointer to scratch vector
- * - decoded : Pointer to vector of new data from decoder
- * (Vector contents may be altered by the function)
- * - len : Number of input samples
- *
- * Output:
- * - inst : Updated user information
- * - outData : Pointer to a memory space where the output data
- * should be stored
- * - pw16_len : Pointer to variable where the number of samples
- * produced will be written
- *
- * Return value : >=0 - Number of samples written to outData
- * -1 - Error
- */
-
-int WebRtcNetEQ_Normal(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- int16_t *pw16_decoded, int16_t len,
- int16_t *pw16_outData, int16_t *pw16_len)
-{
-
- int i;
- int16_t fs_mult;
- int16_t fs_shift;
- int32_t w32_En_speech;
- int16_t enLen;
- int16_t w16_muted;
- int16_t w16_inc, w16_frac;
- int16_t w16_tmp;
- int32_t w32_tmp;
-
- /* Sanity check */
- if (len < 0)
- {
- /* Cannot have negative length of input vector */
- return (-1);
- }
-
- if (len == 0)
- {
- /* Still got some data to play => continue with the same mode */
- *pw16_len = len;
- return (len);
- }
-
- fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
- fs_shift = 30 - WebRtcSpl_NormW32(fs_mult); /* Note that this is not "exact" for 48kHz */
-
- /*
- * Check if last RecOut call resulted in an Expand or a FadeToBGN. If so, we have to take
- * care of some cross-fading and unmuting.
- */
- if (inst->w16_mode == MODE_EXPAND || inst->w16_mode == MODE_FADE_TO_BGN)
- {
-
- /* Define memory where temporary result from Expand algorithm can be stored. */
-#ifdef SCRATCH
- int16_t *pw16_expanded = pw16_scratchPtr + SCRATCH_PW16_EXPANDED;
-#else
- int16_t pw16_expanded[FSMULT * 125];
-#endif
- int16_t expandedLen = 0;
- int16_t w16_decodedMax;
-
- /* Find largest value in new data */
- w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
-
- /* Generate interpolation data using Expand */
- /* First, set Expand parameters to appropriate values. */
- inst->ExpandInst.w16_lagsPosition = 0;
- inst->ExpandInst.w16_lagsDirection = 0;
- inst->ExpandInst.w16_stopMuting = 1; /* Do not mute signal any more */
-
- /* Call Expand */
- WebRtcNetEQ_Expand(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_expanded, &expandedLen, (int16_t) (inst->w16_mode == MODE_FADE_TO_BGN));
-
- inst->ExpandInst.w16_stopMuting = 0; /* Restore value */
- inst->ExpandInst.w16_consecExp = 0; /* Last was not Expand any more */
-
- /* Adjust muting factor (main muting factor times expand muting factor) */
- if (inst->w16_mode == MODE_FADE_TO_BGN)
- {
- /* If last mode was FadeToBGN, the mute factor should be zero. */
- inst->w16_muteFactor = 0;
- }
- else
- {
- /* w16_muteFactor * w16_expandMuteFactor */
- inst->w16_muteFactor
- = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
- inst->ExpandInst.w16_expandMuteFactor, 14);
- }
-
- /* Adjust muting factor if needed (to BGN level) */
- enLen = WEBRTC_SPL_MIN(fs_mult<<6, len); /* min( fs_mult * 64, len ) */
- w16_tmp = 6 + fs_shift - WebRtcSpl_NormW32(
- WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
- w16_tmp = WEBRTC_SPL_MAX(w16_tmp, 0);
- w32_En_speech = WebRtcNetEQ_DotW16W16(pw16_decoded, pw16_decoded, enLen, w16_tmp);
- w32_En_speech = WebRtcSpl_DivW32W16(w32_En_speech, (int16_t) (enLen >> w16_tmp));
-
- if ((w32_En_speech != 0) && (w32_En_speech > inst->BGNInst.w32_energy))
- {
- /* Normalize new frame energy to 15 bits */
- w16_tmp = WebRtcSpl_NormW32(w32_En_speech) - 16;
- /* we want inst->BGNInst.energy/En_speech in Q14 */
- w32_tmp = WEBRTC_SPL_SHIFT_W32(inst->BGNInst.w32_energy, (w16_tmp+14));
- w16_tmp = (int16_t) WEBRTC_SPL_SHIFT_W32(w32_En_speech, w16_tmp);
- w16_tmp = (int16_t) WebRtcSpl_DivW32W16(w32_tmp, w16_tmp);
- w16_muted = (int16_t) WebRtcSpl_SqrtFloor(
- WEBRTC_SPL_LSHIFT_W32((int32_t) w16_tmp,
- 14)); /* w16_muted in Q14 (sqrt(Q28)) */
- }
- else
- {
- w16_muted = 16384; /* 1.0 in Q14 */
- }
- if (w16_muted > inst->w16_muteFactor)
- {
- inst->w16_muteFactor = WEBRTC_SPL_MIN(w16_muted, 16384);
- }
-
- /* If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14) */
- w16_inc = WebRtcSpl_DivW32W16ResW16(64, fs_mult);
- for (i = 0; i < len; i++)
- {
- /* scale with mute factor */
- w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
- /* shift 14 with proper rounding */
- pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
- /* increase mute_factor towards 16384 */
- inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
- }
-
- /*
- * Interpolate the expanded data into the new vector
- * (NB/WB/SWB32/SWB40 8/16/32/32 samples)
- */
- fs_shift = WEBRTC_SPL_MIN(3, fs_shift); /* Set to 3 for >32kHz */
- w16_inc = 4 >> fs_shift;
- w16_frac = w16_inc;
- for (i = 0; i < 8 * fs_mult; i++)
- {
- pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- (WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
- WEBRTC_SPL_MUL_16_16((32 - w16_frac), pw16_expanded[i]) + 8),
- 5);
- w16_frac += w16_inc;
- }
-
-#ifdef NETEQ_CNG_CODEC
- }
- else if (inst->w16_mode==MODE_RFC3389CNG)
- { /* previous was RFC 3389 CNG...*/
- int16_t pw16_CngInterp[32];
- /* Reset mute factor and start up fresh */
- inst->w16_muteFactor = 16384;
- if (inst->CNG_Codec_inst != NULL)
- {
- /* Generate long enough for 32kHz */
- if(WebRtcCng_Generate(inst->CNG_Codec_inst,pw16_CngInterp, 32, 0)<0)
- {
- /* error returned; set return vector to all zeros */
- WebRtcSpl_MemSetW16(pw16_CngInterp, 0, 32);
- }
- }
- else
- {
- /*
- * If no CNG instance is defined, just copy from the decoded data.
- * (This will result in interpolating the decoded with itself.)
- */
- WEBRTC_SPL_MEMCPY_W16(pw16_CngInterp, pw16_decoded, fs_mult * 8);
- }
- /*
- * Interpolate the CNG into the new vector
- * (NB/WB/SWB32kHz/SWB48kHz 8/16/32/32 samples)
- */
- fs_shift = WEBRTC_SPL_MIN(3, fs_shift); /* Set to 3 for >32kHz */
- w16_inc = 4>>fs_shift;
- w16_frac = w16_inc;
- for (i = 0; i < 8 * fs_mult; i++)
- {
- pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
- (WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
- WEBRTC_SPL_MUL_16_16((32-w16_frac), pw16_CngInterp[i]) + 8),
- 5);
- w16_frac += w16_inc;
- }
-#endif
-
- }
- else if (inst->w16_muteFactor < 16384)
- {
- /*
- * Previous was neither of Expand, FadeToBGN or RFC3389_CNG, but we are still
- * ramping up from previous muting.
- * If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14)
- */
- w16_inc = WebRtcSpl_DivW32W16ResW16(64, fs_mult);
- for (i = 0; i < len; i++)
- {
- /* scale with mute factor */
- w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
- /* shift 14 with proper rounding */
- pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
- /* increase mute_factor towards 16384 */
- inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
- }
- }
-
- /* Copy data to other buffer */WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, len);
-
- inst->w16_mode = MODE_NORMAL;
- *pw16_len = len;
- return (len);
-
-}
-
-#undef SCRATCH_PW16_EXPANDED
-#undef SCRATCH_NETEQ_EXPAND
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc
index 8d9c020f96d..bfde179bd17 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/normal.h"
+#include "webrtc/modules/audio_coding/neteq/normal.h"
#include <string.h> // memset, memcpy
@@ -16,11 +16,11 @@
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.h
index fa14685f9bb..aa24b528af4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal.h
@@ -8,16 +8,16 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NORMAL_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_NORMAL_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_NORMAL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_NORMAL_H_
#include <string.h> // Access to size_t.
#include <vector>
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/defines.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -65,4 +65,4 @@ class Normal {
};
} // namespace webrtc
-#endif // SRC_MODULES_AUDIO_CODING_NETEQ4_NORMAL_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_NORMAL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc
index 2bd7b894f42..c855865cfaa 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/normal_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/normal_unittest.cc
@@ -10,16 +10,16 @@
// Unit tests for Normal class.
-#include "webrtc/modules/audio_coding/neteq4/normal.h"
+#include "webrtc/modules/audio_coding/neteq/normal.h"
#include <vector>
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/expand.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/expand.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h
index 4518f91381a..89ddda782cf 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet.h
@@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_H_
#include <list>
@@ -85,4 +85,4 @@ struct Packet {
typedef std::list<Packet*> PacketList;
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.c
deleted file mode 100644
index a542333cf87..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.c
+++ /dev/null
@@ -1,851 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of the actual packet buffer data structure.
- */
-
-#include <assert.h>
-#include "packet_buffer.h"
-
-#include <string.h> /* to define NULL */
-
-#include "signal_processing_library.h"
-
-#include "mcu_dsp_common.h"
-
-#include "neteq_error_codes.h"
-
-#ifdef NETEQ_DELAY_LOGGING
-/* special code for offline delay logging */
-#include "delay_logging.h"
-#include <stdio.h>
-
-extern FILE *delay_fid2; /* file pointer to delay log file */
-extern uint32_t tot_received_packets;
-#endif /* NETEQ_DELAY_LOGGING */
-
-
-int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
- int16_t *pw16_memory, int memorySize)
-{
- int i;
- int pos = 0;
-
- /* Sanity check */
- if ((memorySize < PBUFFER_MIN_MEMORY_SIZE) || (pw16_memory == NULL)
- || (maxNoOfPackets < 2) || (maxNoOfPackets > 600))
- {
- /* Invalid parameters */
- return (PBUFFER_INIT_ERROR);
- }
-
- /* Clear the buffer instance */
- WebRtcSpl_MemSetW16((int16_t*) bufferInst, 0,
- sizeof(PacketBuf_t) / sizeof(int16_t));
-
- /* Clear the buffer memory */
- WebRtcSpl_MemSetW16((int16_t*) pw16_memory, 0, memorySize);
-
- /* Set maximum number of packets */
- bufferInst->maxInsertPositions = maxNoOfPackets;
-
- /* Initialize array pointers */
- /* After each pointer has been set, the index pos is advanced to point immediately
- * after the the recently allocated vector. Note that one step for the pos index
- * corresponds to a int16_t.
- */
-
- bufferInst->timeStamp = (uint32_t*) &pw16_memory[pos];
- pos += maxNoOfPackets << 1; /* advance maxNoOfPackets * uint32_t */
-
- bufferInst->payloadLocation = (int16_t**) &pw16_memory[pos];
- pos += maxNoOfPackets * (sizeof(int16_t*) / sizeof(int16_t)); /* advance */
-
- bufferInst->seqNumber = (uint16_t*) &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * uint16_t */
-
- bufferInst->payloadType = &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * int16_t */
-
- bufferInst->payloadLengthBytes = &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * int16_t */
-
- bufferInst->rcuPlCntr = &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * int16_t */
-
- bufferInst->waitingTime = (int*) (&pw16_memory[pos]);
- /* Advance maxNoOfPackets * sizeof(waitingTime element). */
- pos += maxNoOfPackets *
- sizeof(*bufferInst->waitingTime) / sizeof(*pw16_memory);
-
- /* The payload memory starts after the slot arrays */
- bufferInst->startPayloadMemory = &pw16_memory[pos];
- bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
- bufferInst->memorySizeW16 = (memorySize - pos); /* Remaining memory */
-
- /* Initialize each payload slot as empty with infinite delay */
- for (i = 0; i < bufferInst->maxInsertPositions; i++)
- {
- bufferInst->payloadType[i] = -1;
- }
-
- /* Reset buffer parameters */
- bufferInst->numPacketsInBuffer = 0;
- bufferInst->packSizeSamples = 0;
- bufferInst->insertPosition = 0;
-
- /* Reset buffer statistics */
- bufferInst->discardedPackets = 0;
-
- return (0);
-}
-
-
-int WebRtcNetEQ_PacketBufferFlush(PacketBuf_t *bufferInst)
-{
- int i;
-
- /* Sanity check */
- if (bufferInst->startPayloadMemory == NULL)
- {
- /* Packet buffer has not been initialized */
- /* Don't do the flushing operation, since we do not
- know the state of the struct variables */
- return (0);
- }
-
- /* Set all payload lengths to zero */
- WebRtcSpl_MemSetW16(bufferInst->payloadLengthBytes, 0, bufferInst->maxInsertPositions);
-
- /* Reset buffer variables */
- bufferInst->numPacketsInBuffer = 0;
- bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
- bufferInst->insertPosition = 0;
-
- /* Clear all slots, starting with the last one */
- for (i = (bufferInst->maxInsertPositions - 1); i >= 0; i--)
- {
- bufferInst->payloadType[i] = -1;
- bufferInst->timeStamp[i] = 0;
- bufferInst->seqNumber[i] = 0;
- }
-
- return (0);
-}
-
-
-int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
- int16_t *flushed, int av_sync)
-{
- int nextPos;
- int i;
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- int temp_var;
-#endif /* NETEQ_DELAY_LOGGING */
-
- /* Initialize to "no flush" */
- *flushed = 0;
-
- /* Sanity check */
- if (bufferInst->startPayloadMemory == NULL)
- {
- /* packet buffer has not been initialized */
- return (-1);
- }
-
- /* Sanity check for payload length
- (payloadLen in bytes and memory size in int16_t) */
- if ((RTPpacket->payloadLen > (bufferInst->memorySizeW16 << 1)) || (RTPpacket->payloadLen
- <= 0))
- {
- /* faulty or too long payload length */
- return (-1);
- }
-
- /* If we are in AV-sync mode, there is a risk that we have inserted a sync
- * packet but now received the real version of it. Or because of some timing
- * we might be overwriting a true payload with sync (I'm not sure why this
- * should happen in regular case, but in some FEC enabled case happens).
- * Go through packets and delete the sync version of the packet in hand. Or
- * if this is sync packet and the regular version of it exists in the buffer
- * refrain from inserting.
- *
- * TODO(turajs): Could we get this for free if we had set the RCU-counter of
- * the sync packet to a number larger than 2?
- */
- if (av_sync) {
- for (i = 0; i < bufferInst->maxInsertPositions; ++i) {
- /* Check if sequence numbers match and the payload actually exists. */
- if (bufferInst->seqNumber[i] == RTPpacket->seqNumber &&
- bufferInst->payloadLengthBytes[i] > 0) {
- if (WebRtcNetEQ_IsSyncPayload(RTPpacket->payload,
- RTPpacket->payloadLen)) {
- return 0;
- }
-
- if (WebRtcNetEQ_IsSyncPayload(bufferInst->payloadLocation[i],
- bufferInst->payloadLengthBytes[i])) {
- /* Clear the position in the buffer. */
- bufferInst->payloadType[i] = -1;
- bufferInst->payloadLengthBytes[i] = 0;
-
- /* Reduce packet counter by one. */
- bufferInst->numPacketsInBuffer--;
- /* TODO(turajs) if this is the latest packet better we rewind
- * insertPosition and related variables. */
- break; /* There should be only one match. */
- }
- }
- }
- }
-
- /* Find a position in the buffer for this packet */
- if (bufferInst->numPacketsInBuffer != 0)
- {
- /* Get the next slot */
- bufferInst->insertPosition++;
- if (bufferInst->insertPosition >= bufferInst->maxInsertPositions)
- {
- /* "Wrap around" and start from the beginning */
- bufferInst->insertPosition = 0;
- }
-
- /* Check if there is enough space for the new packet */
- if (bufferInst->currentMemoryPos + ((RTPpacket->payloadLen + 1) >> 1)
- >= &bufferInst->startPayloadMemory[bufferInst->memorySizeW16])
- {
- int16_t *tempMemAddress;
-
- /*
- * Payload does not fit at the end of the memory, put it in the beginning
- * instead
- */
- bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
-
- /*
- * Now, we must search for the next non-empty payload,
- * finding the one with the lowest start address for the payload
- */
- tempMemAddress = &bufferInst->startPayloadMemory[bufferInst->memorySizeW16];
- nextPos = -1;
-
- /* Loop through all slots again */
- for (i = 0; i < bufferInst->maxInsertPositions; i++)
- {
- /* Look for the non-empty slot with the lowest
- payload location address */
- if (bufferInst->payloadLengthBytes[i] != 0 && bufferInst->payloadLocation[i]
- < tempMemAddress)
- {
- tempMemAddress = bufferInst->payloadLocation[i];
- nextPos = i;
- }
- }
-
- /* Check that we did find a previous payload */
- if (nextPos == -1)
- {
- /* The buffer is corrupt => flush and return error */
- WebRtcNetEQ_PacketBufferFlush(bufferInst);
- *flushed = 1;
- return (-1);
- }
- }
- else
- {
- /* Payload fits at the end of memory. */
-
- /* Find the next non-empty slot. */
- nextPos = bufferInst->insertPosition + 1;
-
- /* Increase nextPos until a non-empty slot is found or end of array is encountered*/
- while ((bufferInst->payloadLengthBytes[nextPos] == 0) && (nextPos
- < bufferInst->maxInsertPositions))
- {
- nextPos++;
- }
-
- if (nextPos == bufferInst->maxInsertPositions)
- {
- /*
- * Reached the end of the array, so there must be a packet in the first
- * position instead
- */
- nextPos = 0;
-
- /* Increase nextPos until a non-empty slot is found */
- while (bufferInst->payloadLengthBytes[nextPos] == 0)
- {
- nextPos++;
- }
- }
- } /* end if-else */
-
- /*
- * Check if the new payload will extend into a payload later in memory.
- * If so, the buffer is full.
- */
- if ((bufferInst->currentMemoryPos <= bufferInst->payloadLocation[nextPos])
- && ((&bufferInst->currentMemoryPos[(RTPpacket->payloadLen + 1) >> 1])
- > bufferInst->payloadLocation[nextPos]))
- {
- /* Buffer is full, so the buffer must be flushed */
- WebRtcNetEQ_PacketBufferFlush(bufferInst);
- *flushed = 1;
- }
-
- if (bufferInst->payloadLengthBytes[bufferInst->insertPosition] != 0)
- {
- /* All positions are already taken and entire buffer should be flushed */
- WebRtcNetEQ_PacketBufferFlush(bufferInst);
- *flushed = 1;
- }
-
- }
- else
- {
- /* Buffer is empty, just insert the packet at the beginning */
- bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
- bufferInst->insertPosition = 0;
- }
-
- /* Insert packet in the found position */
- if (RTPpacket->starts_byte1 == 0)
- {
- /* Payload is 16-bit aligned => just copy it */
-
- WEBRTC_SPL_MEMCPY_W8(bufferInst->currentMemoryPos,
- RTPpacket->payload, RTPpacket->payloadLen);
- }
- else
- {
- /* Payload is not 16-bit aligned => align it during copy operation */
- for (i = 0; i < RTPpacket->payloadLen; i++)
- {
- /* copy the (i+1)-th byte to the i-th byte */
-
- WEBRTC_SPL_SET_BYTE(bufferInst->currentMemoryPos,
- (WEBRTC_SPL_GET_BYTE(RTPpacket->payload, (i + 1))), i);
- }
- }
-
- /* Copy the packet information */
- bufferInst->payloadLocation[bufferInst->insertPosition] = bufferInst->currentMemoryPos;
- bufferInst->payloadLengthBytes[bufferInst->insertPosition] = RTPpacket->payloadLen;
- bufferInst->payloadType[bufferInst->insertPosition] = RTPpacket->payloadType;
- bufferInst->seqNumber[bufferInst->insertPosition] = RTPpacket->seqNumber;
- bufferInst->timeStamp[bufferInst->insertPosition] = RTPpacket->timeStamp;
- bufferInst->rcuPlCntr[bufferInst->insertPosition] = RTPpacket->rcuPlCntr;
- bufferInst->waitingTime[bufferInst->insertPosition] = 0;
- /* Update buffer parameters */
- bufferInst->numPacketsInBuffer++;
- bufferInst->currentMemoryPos += (RTPpacket->payloadLen + 1) >> 1;
-
-#ifdef NETEQ_DELAY_LOGGING
- /* special code for offline delay logging */
- if (*flushed)
- {
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_FLUSH;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- }
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_RECIN;
- if ((fwrite(&temp_var, sizeof(int),
- 1, delay_fid2) != 1) ||
- (fwrite(&RTPpacket->timeStamp, sizeof(uint32_t),
- 1, delay_fid2) != 1) ||
- (fwrite(&RTPpacket->seqNumber, sizeof(uint16_t),
- 1, delay_fid2) != 1) ||
- (fwrite(&RTPpacket->payloadType, sizeof(int),
- 1, delay_fid2) != 1) ||
- (fwrite(&RTPpacket->payloadLen, sizeof(int16_t),
- 1, delay_fid2) != 1)) {
- return -1;
- }
- tot_received_packets++;
-#endif /* NETEQ_DELAY_LOGGING */
-
- return (0);
-}
-
-
-int WebRtcNetEQ_PacketBufferExtract(PacketBuf_t *bufferInst, RTPPacket_t *RTPpacket,
- int bufferPosition, int *waitingTime)
-{
-
- /* Sanity check */
- if (bufferInst->startPayloadMemory == NULL)
- {
- /* packet buffer has not been initialized */
- return (PBUFFER_NOT_INITIALIZED);
- }
-
- if (bufferPosition < 0 || bufferPosition >= bufferInst->maxInsertPositions)
- {
- /* buffer position is outside valid range */
- return (NETEQ_OTHER_ERROR);
- }
-
- /* Check that there is a valid payload in the specified position */
- if (bufferInst->payloadLengthBytes[bufferPosition] <= 0)
- {
- /* The position does not contain a valid payload */
- RTPpacket->payloadLen = 0; /* Set zero length */
- return (PBUFFER_NONEXISTING_PACKET); /* Return error */
- }
-
- /* Payload exists => extract payload data */
-
- /* Copy the actual data payload to RTP packet struct */
-
- WEBRTC_SPL_MEMCPY_W16((int16_t*) RTPpacket->payload,
- bufferInst->payloadLocation[bufferPosition],
- (bufferInst->payloadLengthBytes[bufferPosition] + 1) >> 1); /*length in int16_t*/
-
- /* Copy payload parameters */
- RTPpacket->payloadLen = bufferInst->payloadLengthBytes[bufferPosition];
- RTPpacket->payloadType = bufferInst->payloadType[bufferPosition];
- RTPpacket->seqNumber = bufferInst->seqNumber[bufferPosition];
- RTPpacket->timeStamp = bufferInst->timeStamp[bufferPosition];
- RTPpacket->rcuPlCntr = bufferInst->rcuPlCntr[bufferPosition];
- *waitingTime = bufferInst->waitingTime[bufferPosition];
- RTPpacket->starts_byte1 = 0; /* payload is 16-bit aligned */
-
- /* Clear the position in the packet buffer */
- bufferInst->payloadType[bufferPosition] = -1;
- bufferInst->payloadLengthBytes[bufferPosition] = 0;
- bufferInst->seqNumber[bufferPosition] = 0;
- bufferInst->timeStamp[bufferPosition] = 0;
- bufferInst->waitingTime[bufferPosition] = 0;
- bufferInst->payloadLocation[bufferPosition] = bufferInst->startPayloadMemory;
-
- /* Reduce packet counter with one */
- bufferInst->numPacketsInBuffer--;
-
- return (0);
-}
-
-int WebRtcNetEQ_PacketBufferFindLowestTimestamp(PacketBuf_t* buffer_inst,
- uint32_t current_time_stamp,
- uint32_t* time_stamp,
- int* buffer_position,
- int erase_old_packets,
- int16_t* payload_type) {
- int32_t time_stamp_diff = WEBRTC_SPL_WORD32_MAX; /* Smallest diff found. */
- int32_t new_diff;
- int i;
- int16_t rcu_payload_cntr;
- if (buffer_inst->startPayloadMemory == NULL) {
- /* Packet buffer has not been initialized. */
- return PBUFFER_NOT_INITIALIZED;
- }
-
- /* Initialize all return values. */
- *time_stamp = 0;
- *payload_type = -1; /* Indicates that no packet was found. */
- *buffer_position = -1; /* Indicates that no packet was found. */
- rcu_payload_cntr = WEBRTC_SPL_WORD16_MAX; /* Indicates no packet found. */
-
- /* Check if buffer is empty. */
- if (buffer_inst->numPacketsInBuffer <= 0) {
- return 0;
- }
-
- /* Loop through all slots in buffer. */
- if (erase_old_packets) { /* If old payloads should be discarded. */
- for (i = 0; i < buffer_inst->maxInsertPositions; ++i) {
- /* Calculate difference between this slot and current_time_stamp. */
- new_diff = (int32_t)(buffer_inst->timeStamp[i] - current_time_stamp);
-
- /* Check if payload should be discarded. */
- if ((new_diff < 0) /* Payload is too old */
- && (new_diff > -30000) /* Account for TS wrap-around. */
- && (buffer_inst->payloadLengthBytes[i] > 0)) { /* Payload exists. */
- /* Throw away old packet. */
-
- /* Clear the position in the buffer. */
- buffer_inst->payloadType[i] = -1;
- buffer_inst->payloadLengthBytes[i] = 0;
-
- /* Reduce packet counter by one. */
- buffer_inst->numPacketsInBuffer--;
- /* Increase discard counter for in-call statistics. */
- buffer_inst->discardedPackets++;
- } else if (((new_diff < time_stamp_diff)
- || ((new_diff == time_stamp_diff)
- && (buffer_inst->rcuPlCntr[i] < rcu_payload_cntr)))
- && (buffer_inst->payloadLengthBytes[i] > 0)) {
- /* New diff is smaller than previous diffs or we have a candidate with a
- * time stamp as previous candidate but better RCU-counter;
- * and the payload exists.
- */
- /* Save this position as the best candidate. */
- *buffer_position = i;
- time_stamp_diff = new_diff;
- *payload_type = buffer_inst->payloadType[i];
- rcu_payload_cntr = buffer_inst->rcuPlCntr[i];
- }
- }
- } else {
- for (i = 0; i < buffer_inst->maxInsertPositions; ++i) {
- /* Calculate difference between this slot and current_time_stamp. */
- new_diff = (int32_t)(buffer_inst->timeStamp[i] - current_time_stamp);
-
- /* Check if this is the oldest packet. */
- if (((new_diff < time_stamp_diff)
- || ((new_diff == time_stamp_diff)
- && (buffer_inst->rcuPlCntr[i] < rcu_payload_cntr)))
- && (buffer_inst->payloadLengthBytes[i] > 0)) {
- /* New diff is smaller than previous diffs or we have a candidate with a
- * time_stamp as previous candidate but better RCU-counter;
- * and the payload exists.
- */
- /* Save this position as the best candidate. */
- *buffer_position = i;
- time_stamp_diff = new_diff;
- *payload_type = buffer_inst->payloadType[i];
- rcu_payload_cntr = buffer_inst->rcuPlCntr[i];
- }
- }
- }
-
- /* Check that we did find a real position. */
- if (*buffer_position >= 0) {
- /* Get the time_stamp for the best position. */
- *time_stamp = buffer_inst->timeStamp[*buffer_position];
- }
-
- return 0;
-}
-
-int WebRtcNetEQ_PacketBufferGetPacketSize(const PacketBuf_t* buffer_inst,
- int buffer_pos,
- const CodecDbInst_t* codec_database,
- int codec_pos, int last_duration,
- int av_sync) {
- if (codec_database->funcDurationEst[codec_pos] == NULL) {
- return last_duration;
- }
-
- if (av_sync != 0 &&
- WebRtcNetEQ_IsSyncPayload(buffer_inst->payloadLocation[buffer_pos],
- buffer_inst->payloadLengthBytes[buffer_pos])) {
- // In AV-sync and sync payload, report |last_duration| as current duration.
- return last_duration;
- }
-
- return (*codec_database->funcDurationEst[codec_pos])(
- codec_database->codec_state[codec_pos],
- (const uint8_t *)buffer_inst->payloadLocation[buffer_pos],
- buffer_inst->payloadLengthBytes[buffer_pos]);
-}
-
-int32_t WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t* buffer_inst,
- const CodecDbInst_t* codec_database,
- int av_sync) {
- int i, count;
- int last_duration;
- int last_codec_pos;
- int last_payload_type;
- int32_t size_samples;
-
- count = 0;
- last_duration = buffer_inst->packSizeSamples;
- last_codec_pos = -1;
- last_payload_type = -1;
- size_samples = 0;
-
- /* Loop through all slots in the buffer */
- for (i = 0; i < buffer_inst->maxInsertPositions; i++) {
- /* Only count the packets with non-zero size */
- if (buffer_inst->payloadLengthBytes[i] != 0) {
- int payload_type;
- int codec_pos;
- /* Figure out the codec database entry for this payload_type. */
- payload_type = buffer_inst->payloadType[i];
- /* Remember the last one, to avoid the database search. */
- if(payload_type == last_payload_type) {
- codec_pos = last_codec_pos;
- }
- else {
- codec_pos = WebRtcNetEQ_DbGetCodec(codec_database,
- payload_type);
- if (codec_pos >= 0) {
- codec_pos = codec_database->position[codec_pos];
- }
- }
- last_codec_pos = codec_pos;
- last_payload_type = payload_type;
- if (codec_pos >= 0) {
- /*
- * Right now WebRtcNetEQ_PacketBufferGetPacketSize either always
- * returns last_duration or always computes the real duration without
- * looking at last_duration. If an implementation really wanted to use
- * last_duration to compute a changing duration, we would have to
- * iterate through the packets in chronological order by timestamp.
- */
- /* Check for error before setting. */
- int temp_last_duration = WebRtcNetEQ_PacketBufferGetPacketSize(
- buffer_inst, i, codec_database, codec_pos,
- last_duration, av_sync);
- if (temp_last_duration >= 0)
- last_duration = temp_last_duration;
- }
- /* Add in the size of this packet. */
- size_samples += last_duration;
- count++;
- }
- }
-
- /* Sanity check; size cannot be negative */
- if (size_samples < 0) {
- size_samples = 0;
- }
- return size_samples;
-}
-
-void WebRtcNetEQ_IncrementWaitingTimes(PacketBuf_t *buffer_inst) {
- int i;
- /* Loop through all slots in the buffer. */
- for (i = 0; i < buffer_inst->maxInsertPositions; ++i) {
- /* Only increment waiting time for the packets with non-zero size. */
- if (buffer_inst->payloadLengthBytes[i] != 0) {
- buffer_inst->waitingTime[i]++;
- }
- }
-}
-
-int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID,
- int noOfCodecs, int *maxBytes,
- int *maxSlots,
- int* per_slot_overhead_bytes)
-{
- int i;
- int ok = 0;
- int16_t w16_tmp;
- int16_t codecBytes;
- int16_t codecBuffers;
-
- /* Initialize return variables to zero */
- *maxBytes = 0;
- *maxSlots = 0;
-
- /* Loop through all codecs supplied to function */
- for (i = 0; i < noOfCodecs; i++)
- {
- /* Find current codec and set parameters accordingly */
-
- if ((codecID[i] == kDecoderPCMu) || (codecID[i] == kDecoderPCMu_2ch))
- {
- codecBytes = 1680; /* Up to 210ms @ 64kbps */
- codecBuffers = 30; /* Down to 5ms frames */
- }
- else if ((codecID[i] == kDecoderPCMa) ||
- (codecID[i] == kDecoderPCMa_2ch))
- {
- codecBytes = 1680; /* Up to 210ms @ 64kbps */
- codecBuffers = 30; /* Down to 5ms frames */
- }
- else if (codecID[i] == kDecoderILBC)
- {
- codecBytes = 380; /* 200ms @ 15.2kbps (20ms frames) */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderISAC)
- {
- codecBytes = 960; /* 240ms @ 32kbps (60ms frames) */
- codecBuffers = 8;
- }
- else if ((codecID[i] == kDecoderISACswb) ||
- (codecID[i] == kDecoderISACfb))
- {
- codecBytes = 1560; /* 240ms @ 52kbps (30ms frames) */
- codecBuffers = 8;
- }
- else if (codecID[i] == kDecoderOpus)
- {
- codecBytes = 15300; /* 240ms @ 510kbps (60ms frames) */
- codecBuffers = 30; /* Replicating the value for PCMu/a */
- }
- else if ((codecID[i] == kDecoderPCM16B) ||
- (codecID[i] == kDecoderPCM16B_2ch))
- {
- codecBytes = 3360; /* 210ms */
- codecBuffers = 15;
- }
- else if ((codecID[i] == kDecoderPCM16Bwb) ||
- (codecID[i] == kDecoderPCM16Bwb_2ch))
- {
- codecBytes = 6720; /* 210ms */
- codecBuffers = 15;
- }
- else if ((codecID[i] == kDecoderPCM16Bswb32kHz) ||
- (codecID[i] == kDecoderPCM16Bswb32kHz_2ch))
- {
- codecBytes = 13440; /* 210ms */
- codecBuffers = 15;
- }
- else if (codecID[i] == kDecoderPCM16Bswb48kHz)
- {
- codecBytes = 20160; /* 210ms */
- codecBuffers = 15;
- }
- else if ((codecID[i] == kDecoderG722) ||
- (codecID[i] == kDecoderG722_2ch))
- {
- codecBytes = 1680; /* 210ms @ 64kbps */
- codecBuffers = 15;
- }
- else if (codecID[i] == kDecoderRED)
- {
- codecBytes = 0; /* Should not be max... */
- codecBuffers = 0;
- }
- else if (codecID[i] == kDecoderAVT)
- {
- codecBytes = 0; /* Should not be max... */
- codecBuffers = 0;
- }
- else if (codecID[i] == kDecoderCNG)
- {
- codecBytes = 0; /* Should not be max... */
- codecBuffers = 0;
- }
- else if (codecID[i] == kDecoderG729)
- {
- codecBytes = 210; /* 210ms @ 8kbps */
- codecBuffers = 20; /* max 200ms supported for 10ms frames */
- }
- else if (codecID[i] == kDecoderG729_1)
- {
- codecBytes = 840; /* 210ms @ 32kbps */
- codecBuffers = 10; /* max 200ms supported for 20ms frames */
- }
- else if (codecID[i] == kDecoderG726_16)
- {
- codecBytes = 400; /* 200ms @ 16kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG726_24)
- {
- codecBytes = 600; /* 200ms @ 24kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG726_32)
- {
- codecBytes = 800; /* 200ms @ 32kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG726_40)
- {
- codecBytes = 1000; /* 200ms @ 40kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1_16)
- {
- codecBytes = 420; /* 210ms @ 16kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1_24)
- {
- codecBytes = 630; /* 210ms @ 24kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1_32)
- {
- codecBytes = 840; /* 210ms @ 32kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1C_24)
- {
- codecBytes = 630; /* 210ms @ 24kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1C_32)
- {
- codecBytes = 840; /* 210ms @ 32kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderG722_1C_48)
- {
- codecBytes = 1260; /* 210ms @ 48kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderSPEEX_8)
- {
- codecBytes = 1250; /* 210ms @ 50kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderSPEEX_16)
- {
- codecBytes = 1250; /* 210ms @ 50kbps */
- codecBuffers = 10;
- }
- else if ((codecID[i] == kDecoderCELT_32) ||
- (codecID[i] == kDecoderCELT_32_2ch))
- {
- codecBytes = 1250; /* 210ms @ 50kbps */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderGSMFR)
- {
- codecBytes = 340; /* 200ms */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderAMR)
- {
- codecBytes = 384; /* 240ms @ 12.2kbps+headers (60ms frames) */
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderAMRWB)
- {
- codecBytes = 744;
- codecBuffers = 10;
- }
- else if (codecID[i] == kDecoderArbitrary)
- {
- codecBytes = 6720; /* Assume worst case uncompressed WB 210ms */
- codecBuffers = 15;
- }
- else
- {
- /*Unknow codec */
- codecBytes = 0;
- codecBuffers = 0;
- ok = CODEC_DB_UNKNOWN_CODEC;
- }
-
- /* Update max variables */
- *maxBytes = WEBRTC_SPL_MAX((*maxBytes), codecBytes);
- *maxSlots = WEBRTC_SPL_MAX((*maxSlots), codecBuffers);
-
- } /* end of for loop */
-
- /*
- * Add size needed by the additional pointers for each slot inside struct,
- * as indicated on each line below.
- */
- w16_tmp = (sizeof(uint32_t) /* timeStamp */
- + sizeof(int16_t*) /* payloadLocation */
- + sizeof(uint16_t) /* seqNumber */
- + sizeof(int16_t) /* payloadType */
- + sizeof(int16_t) /* payloadLengthBytes */
- + sizeof(int16_t) /* rcuPlCntr */
- + sizeof(int)); /* waitingTime */
- /* Add the extra size per slot to the memory count */
- *maxBytes += w16_tmp * (*maxSlots);
-
- *per_slot_overhead_bytes = w16_tmp;
- return ok;
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc
index d19abbaa84d..8a81c2598bc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.cc
@@ -12,12 +12,12 @@
// an STL list. The list is kept sorted at all times so that the next packet to
// decode is at the beginning of the list.
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
#include <algorithm> // find_if()
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
namespace webrtc {
@@ -36,14 +36,8 @@ class NewTimestampIsLarger {
const Packet* new_packet_;
};
-// Constructor. The arguments define the maximum number of slots and maximum
-// payload memory (excluding RTP headers) that the buffer will accept.
-PacketBuffer::PacketBuffer(size_t max_number_of_packets,
- size_t max_memory_bytes)
- : max_number_of_packets_(max_number_of_packets),
- max_memory_bytes_(max_memory_bytes),
- current_memory_bytes_(0) {
-}
+PacketBuffer::PacketBuffer(size_t max_number_of_packets)
+ : max_number_of_packets_(max_number_of_packets) {}
// Destructor. All packets in the buffer will be destroyed.
PacketBuffer::~PacketBuffer() {
@@ -53,7 +47,6 @@ PacketBuffer::~PacketBuffer() {
// Flush the buffer. All packets in the buffer will be destroyed.
void PacketBuffer::Flush() {
DeleteAllPackets(&buffer_);
- current_memory_bytes_ = 0;
}
int PacketBuffer::InsertPacket(Packet* packet) {
@@ -66,22 +59,10 @@ int PacketBuffer::InsertPacket(Packet* packet) {
int return_val = kOK;
- if ((buffer_.size() >= max_number_of_packets_) ||
- (current_memory_bytes_ + packet->payload_length
- > static_cast<int>(max_memory_bytes_))) {
+ if (buffer_.size() >= max_number_of_packets_) {
// Buffer is full. Flush it.
Flush();
return_val = kFlushed;
- if ((buffer_.size() >= max_number_of_packets_) ||
- (current_memory_bytes_ + packet->payload_length
- > static_cast<int>(max_memory_bytes_))) {
- // Buffer is still too small for the packet. Either the buffer limits are
- // really small, or the packet is really large. Delete the packet and
- // return an error.
- delete [] packet->payload;
- delete packet;
- return kOversizePacket;
- }
}
// Get an iterator pointing to the place in the buffer where the new packet
@@ -91,7 +72,6 @@ int PacketBuffer::InsertPacket(Packet* packet) {
buffer_.rbegin(), buffer_.rend(),
NewTimestampIsLarger(packet));
buffer_.insert(rit.base(), packet); // Insert the packet at that position.
- current_memory_bytes_ += packet->payload_length;
return return_val;
}
@@ -183,8 +163,6 @@ Packet* PacketBuffer::GetNextPacket(int* discard_count) {
// Assert that the packet sanity checks in InsertPacket method works.
assert(packet && packet->payload);
buffer_.pop_front();
- current_memory_bytes_ -= packet->payload_length;
- assert(current_memory_bytes_ >= 0); // Assert bookkeeping is correct.
// Discard other packets with the same timestamp. These are duplicates or
// redundant payloads that should not be used.
if (discard_count) {
@@ -206,17 +184,14 @@ int PacketBuffer::DiscardNextPacket() {
if (Empty()) {
return kBufferEmpty;
}
- Packet* temp_packet = buffer_.front();
// Assert that the packet sanity checks in InsertPacket method works.
- assert(temp_packet && temp_packet->payload);
- current_memory_bytes_ -= temp_packet->payload_length;
- assert(current_memory_bytes_ >= 0); // Assert bookkeeping is correct.
+ assert(buffer_.front());
+ assert(buffer_.front()->payload);
DeleteFirstPacket(&buffer_);
return kOK;
}
int PacketBuffer::DiscardOldPackets(uint32_t timestamp_limit) {
- int discard_count = 0;
while (!Empty() &&
timestamp_limit != buffer_.front()->header.timestamp &&
static_cast<uint32_t>(timestamp_limit
@@ -225,7 +200,6 @@ int PacketBuffer::DiscardOldPackets(uint32_t timestamp_limit) {
if (DiscardNextPacket() != kOK) {
assert(false); // Must be ok by design.
}
- ++discard_count;
}
return 0;
}
@@ -240,8 +214,15 @@ int PacketBuffer::NumSamplesInBuffer(DecoderDatabase* decoder_database,
AudioDecoder* decoder =
decoder_database->GetDecoder(packet->header.payloadType);
if (decoder) {
- int duration = packet->sync_packet ? last_duration :
- decoder->PacketDuration(packet->payload, packet->payload_length);
+ int duration;
+ if (packet->sync_packet) {
+ duration = last_duration;
+ } else if (packet->primary) {
+ duration =
+ decoder->PacketDuration(packet->payload, packet->payload_length);
+ } else {
+ continue;
+ }
if (duration >= 0) {
last_duration = duration; // Save the most up-to-date (valid) duration.
}
@@ -275,14 +256,9 @@ void PacketBuffer::DeleteAllPackets(PacketList* packet_list) {
}
}
-void PacketBuffer::BufferStat(int* num_packets,
- int* max_num_packets,
- int* current_memory_bytes,
- int* max_memory_bytes) const {
+void PacketBuffer::BufferStat(int* num_packets, int* max_num_packets) const {
*num_packets = static_cast<int>(buffer_.size());
*max_num_packets = static_cast<int>(max_number_of_packets_);
- *current_memory_bytes = current_memory_bytes_;
- *max_memory_bytes = static_cast<int>(max_memory_bytes_);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
index 61ff2b970fd..76c4ddd161d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,247 +8,128 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * Interface for the actual packet buffer data structure.
- */
-
-#ifndef PACKET_BUFFER_H
-#define PACKET_BUFFER_H
-
-#include "typedefs.h"
-
-#include "webrtc_neteq.h"
-#include "rtp.h"
-
-/* Define minimum allowed buffer memory, in 16-bit words */
-#define PBUFFER_MIN_MEMORY_SIZE 150
-
-/****************************/
-/* The packet buffer struct */
-/****************************/
-
-typedef struct
-{
-
- /* Variables common to the entire buffer */
- uint16_t packSizeSamples; /* packet size in samples of last decoded packet */
- int16_t *startPayloadMemory; /* pointer to the payload memory */
- int memorySizeW16; /* the size (in int16_t) of the payload memory */
- int16_t *currentMemoryPos; /* The memory position to insert next payload */
- int numPacketsInBuffer; /* The number of packets in the buffer */
- int insertPosition; /* The position to insert next packet */
- int maxInsertPositions; /* Maximum number of packets allowed */
-
- /* Arrays with one entry per packet slot */
- /* NOTE: If these are changed, the changes must be accounted for at the end of
- the function WebRtcNetEQ_GetDefaultCodecSettings(). */
- uint32_t *timeStamp; /* Timestamp in slot n */
- int16_t **payloadLocation; /* Memory location of payload in slot n */
- uint16_t *seqNumber; /* Sequence number in slot n */
- int16_t *payloadType; /* Payload type of packet in slot n */
- int16_t *payloadLengthBytes; /* Payload length of packet in slot n */
- int16_t *rcuPlCntr; /* zero for non-RCU payload, 1 for main payload
- 2 for redundant payload */
- int *waitingTime;
-
- /* Statistics counter */
- uint16_t discardedPackets; /* Number of discarded packets */
-
-} PacketBuf_t;
-
-/*************************/
-/* Function declarations */
-/*************************/
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferInit(...)
- *
- * This function initializes the packet buffer.
- *
- * Input:
- * - bufferInst : Buffer instance to be initialized
- * - noOfPackets : Maximum number of packets that buffer should hold
- * - memory : Pointer to the storage memory for the payloads
- * - memorySize : The size of the payload memory (in int16_t)
- *
- * Output:
- * - bufferInst : Updated buffer instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
- int16_t *pw16_memory, int memorySize);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferFlush(...)
- *
- * This function flushes all the packets in the buffer.
- *
- * Input:
- * - bufferInst : Buffer instance to be flushed
- *
- * Output:
- * - bufferInst : Flushed buffer instance
- *
- * Return value : 0 - Ok
- */
-
-int WebRtcNetEQ_PacketBufferFlush(PacketBuf_t *bufferInst);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferInsert(...)
- *
- * This function inserts an RTP packet into the packet buffer.
- *
- * Input:
- * - bufferInst : Buffer instance
- * - RTPpacket : An RTP packet struct (with payload, sequence
- * number, etc.)
- * - av_sync : 1 indicates AV-sync enabled, 0 disabled.
- *
- * Output:
- * - bufferInst : Updated buffer instance
- * - flushed : 1 if buffer was flushed, 0 otherwise
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
- int16_t *flushed, int av_sync);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferExtract(...)
- *
- * This function extracts a payload from the buffer.
- *
- * Input:
- * - bufferInst : Buffer instance
- * - bufferPosition: Position of the packet that should be extracted
- *
- * Output:
- * - RTPpacket : An RTP packet struct (with payload, sequence
- * number, etc)
- * - bufferInst : Updated buffer instance
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_PacketBufferExtract(PacketBuf_t *bufferInst, RTPPacket_t *RTPpacket,
- int bufferPosition, int *waitingTime);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferFindLowestTimestamp(...)
- *
- * This function finds the next packet with the lowest timestamp.
- *
- * Input:
- * - buffer_inst : Buffer instance.
- * - current_time_stamp : The timestamp to compare packet timestamps with.
- * - erase_old_packets : If non-zero, erase packets older than currentTS.
- *
- * Output:
- * - time_stamp : Lowest timestamp that was found.
- * - buffer_position : Position of this packet (-1 if there are no
- * packets in the buffer).
- * - payload_type : Payload type of the found payload.
- *
- * Return value : 0 - Ok;
- * < 0 - Error.
- */
-
-int WebRtcNetEQ_PacketBufferFindLowestTimestamp(PacketBuf_t* buffer_inst,
- uint32_t current_time_stamp,
- uint32_t* time_stamp,
- int* buffer_position,
- int erase_old_packets,
- int16_t* payload_type);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferGetPacketSize(...)
- *
- * Calculate and return an estimate of the data length (in samples) of the
- * given packet. If no estimate is available (because we do not know how to
- * compute packet durations for the associated payload type), last_duration
- * will be returned instead.
- *
- * Input:
- * - buffer_inst : Buffer instance
- * - buffer_pos : The index of the buffer of which to estimate the
- * duration
- * - codec_database : Codec database instance
- * - codec_pos : The codec database entry associated with the payload
- * type of the specified buffer.
- * - last_duration : The duration of the previous frame.
- * - av_sync : 1 indicates AV-sync enabled, 0 disabled.
- *
- * Return value : The buffer size in samples
- */
-
-int WebRtcNetEQ_PacketBufferGetPacketSize(const PacketBuf_t* buffer_inst,
- int buffer_pos,
- const CodecDbInst_t* codec_database,
- int codec_pos, int last_duration,
- int av_sync);
-
-/****************************************************************************
- * WebRtcNetEQ_PacketBufferGetSize(...)
- *
- * Calculate and return an estimate of the total data length (in samples)
- * currently in the buffer. The estimate is calculated as the number of
- * packets currently in the buffer (which does not have any remaining waiting
- * time), multiplied with the number of samples obtained from the last
- * decoded packet.
- *
- * Input:
- * - buffer_inst : Buffer instance
- * - codec_database : Codec database instance
- * - av_sync : 1 indicates AV-sync enabled, 0 disabled.
- *
- * Return value : The buffer size in samples
- */
-
-int32_t WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t* buffer_inst,
- const CodecDbInst_t* codec_database,
- int av_sync);
-
-/****************************************************************************
- * WebRtcNetEQ_IncrementWaitingTimes(...)
- *
- * Increment the waiting time for all packets in the buffer by one.
- *
- * Input:
- * - bufferInst : Buffer instance
- *
- * Return value : n/a
- */
-
-void WebRtcNetEQ_IncrementWaitingTimes(PacketBuf_t *buffer_inst);
-
-/****************************************************************************
- * WebRtcNetEQ_GetDefaultCodecSettings(...)
- *
- * Calculates a recommended buffer size for a specific set of codecs.
- *
- * Input:
- * - codecID : An array of codec types that will be used
- * - noOfCodecs : Number of codecs in array codecID
- *
- * Output:
- * - maxBytes : Recommended buffer memory size in bytes
- * - maxSlots : Recommended number of slots in buffer
- * - per_slot_overhead_bytes : overhead in bytes for each slot in buffer.
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID,
- int noOfCodecs, int *maxBytes,
- int *maxSlots,
- int* per_slot_overhead_bytes);
-
-#endif /* PACKET_BUFFER_H */
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_BUFFER_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declaration.
+class DecoderDatabase;
+
+// This is the actual buffer holding the packets before decoding.
+class PacketBuffer {
+ public:
+ enum BufferReturnCodes {
+ kOK = 0,
+ kFlushed,
+ kNotFound,
+ kBufferEmpty,
+ kInvalidPacket,
+ kInvalidPointer
+ };
+
+ // Constructor creates a buffer which can hold a maximum of
+ // |max_number_of_packets| packets.
+ PacketBuffer(size_t max_number_of_packets);
+
+ // Deletes all packets in the buffer before destroying the buffer.
+ virtual ~PacketBuffer();
+
+ // Flushes the buffer and deletes all packets in it.
+ virtual void Flush();
+
+ // Returns true for an empty buffer.
+ virtual bool Empty() const { return buffer_.empty(); }
+
+ // Inserts |packet| into the buffer. The buffer will take over ownership of
+ // the packet object.
+ // Returns PacketBuffer::kOK on success, PacketBuffer::kFlushed if the buffer
+ // was flushed due to overfilling.
+ virtual int InsertPacket(Packet* packet);
+
+ // Inserts a list of packets into the buffer. The buffer will take over
+ // ownership of the packet objects.
+ // Returns PacketBuffer::kOK if all packets were inserted successfully.
+ // If the buffer was flushed due to overfilling, only a subset of the list is
+ // inserted, and PacketBuffer::kFlushed is returned.
+ // The last three parameters are included for legacy compatibility.
+ // TODO(hlundin): Redesign to not use current_*_payload_type and
+ // decoder_database.
+ virtual int InsertPacketList(PacketList* packet_list,
+ const DecoderDatabase& decoder_database,
+ uint8_t* current_rtp_payload_type,
+ uint8_t* current_cng_rtp_payload_type);
+
+ // Gets the timestamp for the first packet in the buffer and writes it to the
+ // output variable |next_timestamp|.
+ // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
+ // PacketBuffer::kOK otherwise.
+ virtual int NextTimestamp(uint32_t* next_timestamp) const;
+
+ // Gets the timestamp for the first packet in the buffer with a timestamp no
+ // lower than the input limit |timestamp|. The result is written to the output
+ // variable |next_timestamp|.
+ // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
+ // PacketBuffer::kOK otherwise.
+ virtual int NextHigherTimestamp(uint32_t timestamp,
+ uint32_t* next_timestamp) const;
+
+ // Returns a (constant) pointer the RTP header of the first packet in the
+ // buffer. Returns NULL if the buffer is empty.
+ virtual const RTPHeader* NextRtpHeader() const;
+
+ // Extracts the first packet in the buffer and returns a pointer to it.
+ // Returns NULL if the buffer is empty. The caller is responsible for deleting
+ // the packet.
+ // Subsequent packets with the same timestamp as the one extracted will be
+ // discarded and properly deleted. The number of discarded packets will be
+ // written to the output variable |discard_count|.
+ virtual Packet* GetNextPacket(int* discard_count);
+
+ // Discards the first packet in the buffer. The packet is deleted.
+ // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
+ // PacketBuffer::kOK otherwise.
+ virtual int DiscardNextPacket();
+
+ // Discards all packets that are (strictly) older than |timestamp_limit|.
+ // Returns number of packets discarded.
+ virtual int DiscardOldPackets(uint32_t timestamp_limit);
+
+ // Returns the number of packets in the buffer, including duplicates and
+ // redundant packets.
+ virtual int NumPacketsInBuffer() const {
+ return static_cast<int>(buffer_.size());
+ }
+
+ // Returns the number of samples in the buffer, including samples carried in
+ // duplicate and redundant packets.
+ virtual int NumSamplesInBuffer(DecoderDatabase* decoder_database,
+ int last_decoded_length) const;
+
+ // Increase the waiting time counter for every packet in the buffer by |inc|.
+ // The default value for |inc| is 1.
+ virtual void IncrementWaitingTimes(int inc = 1);
+
+ virtual void BufferStat(int* num_packets, int* max_num_packets) const;
+
+ // Static method that properly deletes the first packet, and its payload
+ // array, in |packet_list|. Returns false if |packet_list| already was empty,
+ // otherwise true.
+ static bool DeleteFirstPacket(PacketList* packet_list);
+
+ // Static method that properly deletes all packets, and their payload arrays,
+ // in |packet_list|.
+ static void DeleteAllPackets(PacketList* packet_list);
+
+ private:
+ size_t max_number_of_packets_;
+ PacketList buffer_;
+ DISALLOW_COPY_AND_ASSIGN(PacketBuffer);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_PACKET_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
index c8109dc6dff..5e6b89fdc4b 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/packet_buffer_unittest.cc
@@ -10,12 +10,12 @@
// Unit tests for PacketBuffer class.
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/packet_buffer.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
using ::testing::Return;
using ::testing::_;
@@ -70,13 +70,13 @@ void PacketGenerator::SkipPacket() {
// Start of test definitions.
TEST(PacketBuffer, CreateAndDestroy) {
- PacketBuffer* buffer = new PacketBuffer(10, 1000); // 10 packets, 1000 bytes.
+ PacketBuffer* buffer = new PacketBuffer(10); // 10 packets.
EXPECT_TRUE(buffer->Empty());
delete buffer;
}
TEST(PacketBuffer, InsertPacket) {
- PacketBuffer buffer(10, 1000); // 10 packets, 1000 bytes.
+ PacketBuffer buffer(10); // 10 packets.
PacketGenerator gen(17u, 4711u, 0, 10);
const int payload_len = 100;
@@ -88,7 +88,6 @@ TEST(PacketBuffer, InsertPacket) {
EXPECT_EQ(4711u, next_ts);
EXPECT_FALSE(buffer.Empty());
EXPECT_EQ(1, buffer.NumPacketsInBuffer());
- EXPECT_EQ(payload_len, buffer.current_memory_bytes());
const RTPHeader* hdr = buffer.NextRtpHeader();
EXPECT_EQ(&(packet->header), hdr); // Compare pointer addresses.
@@ -98,7 +97,7 @@ TEST(PacketBuffer, InsertPacket) {
// Test to flush buffer.
TEST(PacketBuffer, FlushBuffer) {
- PacketBuffer buffer(10, 1000); // 10 packets, 1000 bytes.
+ PacketBuffer buffer(10); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
const int payload_len = 10;
@@ -109,18 +108,16 @@ TEST(PacketBuffer, FlushBuffer) {
}
EXPECT_EQ(10, buffer.NumPacketsInBuffer());
EXPECT_FALSE(buffer.Empty());
- EXPECT_EQ(10 * payload_len, buffer.current_memory_bytes());
buffer.Flush();
// Buffer should delete the payloads itself.
EXPECT_EQ(0, buffer.NumPacketsInBuffer());
EXPECT_TRUE(buffer.Empty());
- EXPECT_EQ(0, buffer.current_memory_bytes());
}
// Test to fill the buffer over the limits, and verify that it flushes.
TEST(PacketBuffer, OverfillBuffer) {
- PacketBuffer buffer(10, 1000); // 10 packets, 1000 bytes.
+ PacketBuffer buffer(10); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
// Insert 10 small packets; should be ok.
@@ -131,7 +128,6 @@ TEST(PacketBuffer, OverfillBuffer) {
EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
}
EXPECT_EQ(10, buffer.NumPacketsInBuffer());
- EXPECT_EQ(10 * payload_len, buffer.current_memory_bytes());
uint32_t next_ts;
EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts));
EXPECT_EQ(0u, next_ts); // Expect first inserted packet to be first in line.
@@ -140,30 +136,17 @@ TEST(PacketBuffer, OverfillBuffer) {
Packet* packet = gen.NextPacket(payload_len);
EXPECT_EQ(PacketBuffer::kFlushed, buffer.InsertPacket(packet));
EXPECT_EQ(1, buffer.NumPacketsInBuffer());
- EXPECT_EQ(payload_len, buffer.current_memory_bytes());
EXPECT_EQ(PacketBuffer::kOK, buffer.NextTimestamp(&next_ts));
// Expect last inserted packet to be first in line.
EXPECT_EQ(packet->header.timestamp, next_ts);
- // Insert 2 large packets; expect to flush when inserting the second one.
- const int large_payload_len = 500;
- packet = gen.NextPacket(large_payload_len);
- EXPECT_EQ(PacketBuffer::kOK, buffer.InsertPacket(packet));
- EXPECT_EQ(2, buffer.NumPacketsInBuffer());
- EXPECT_EQ(payload_len + large_payload_len, buffer.current_memory_bytes());
-
- packet = gen.NextPacket(large_payload_len);
- EXPECT_EQ(PacketBuffer::kFlushed, buffer.InsertPacket(packet));
- EXPECT_EQ(1, buffer.NumPacketsInBuffer());
- EXPECT_EQ(large_payload_len, buffer.current_memory_bytes());
-
- // Flush buffer to delete remaining packets.
+ // Flush buffer to delete all packets.
buffer.Flush();
}
// Test inserting a list of packets.
TEST(PacketBuffer, InsertPacketList) {
- PacketBuffer buffer(10, 1000); // 10 packets, 1000 bytes.
+ PacketBuffer buffer(10); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
PacketList list;
const int payload_len = 10;
@@ -187,7 +170,6 @@ TEST(PacketBuffer, InsertPacketList) {
&current_cng_pt));
EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list.
EXPECT_EQ(10, buffer.NumPacketsInBuffer());
- EXPECT_EQ(10 * payload_len, buffer.current_memory_bytes());
EXPECT_EQ(0, current_pt); // Current payload type changed to 0.
EXPECT_EQ(0xFF, current_cng_pt); // CNG payload type not changed.
@@ -200,7 +182,7 @@ TEST(PacketBuffer, InsertPacketList) {
// Expecting the buffer to flush.
// TODO(hlundin): Remove this test when legacy operation is no longer needed.
TEST(PacketBuffer, InsertPacketListChangePayloadType) {
- PacketBuffer buffer(10, 1000); // 10 packets, 1000 bytes.
+ PacketBuffer buffer(10); // 10 packets.
PacketGenerator gen(0, 0, 0, 10);
PacketList list;
const int payload_len = 10;
@@ -229,7 +211,6 @@ TEST(PacketBuffer, InsertPacketListChangePayloadType) {
&current_cng_pt));
EXPECT_TRUE(list.empty()); // The PacketBuffer should have depleted the list.
EXPECT_EQ(1, buffer.NumPacketsInBuffer()); // Only the last packet.
- EXPECT_EQ(1 * payload_len, buffer.current_memory_bytes());
EXPECT_EQ(1, current_pt); // Current payload type changed to 0.
EXPECT_EQ(0xFF, current_cng_pt); // CNG payload type not changed.
@@ -252,7 +233,7 @@ TEST(PacketBuffer, InsertPacketListChangePayloadType) {
// 8 0x0005 0x00000028 0x0000001E
// 9 0x0006 0x00000032 0x00000028
TEST(PacketBuffer, ExtractOrderRedundancy) {
- PacketBuffer buffer(100, 1000); // 100 packets, 1000 bytes.
+ PacketBuffer buffer(100); // 100 packets.
const uint32_t ts_increment = 10; // Samples per packet.
const uint16_t start_seq_no = 0xFFFF - 2; // Wraps after 3 packets.
const uint32_t start_ts = 0xFFFFFFFF -
@@ -321,7 +302,7 @@ TEST(PacketBuffer, ExtractOrderRedundancy) {
}
TEST(PacketBuffer, DiscardPackets) {
- PacketBuffer buffer(100, 1000); // 100 packets, 1000 bytes.
+ PacketBuffer buffer(100); // 100 packets.
const uint16_t start_seq_no = 17;
const uint32_t start_ts = 4711;
const uint32_t ts_increment = 10;
@@ -335,7 +316,6 @@ TEST(PacketBuffer, DiscardPackets) {
buffer.InsertPacket(packet);
}
EXPECT_EQ(10, buffer.NumPacketsInBuffer());
- EXPECT_EQ(10 * payload_len, buffer.current_memory_bytes());
// Discard them one by one and make sure that the right packets are at the
// front of the buffer.
@@ -351,7 +331,7 @@ TEST(PacketBuffer, DiscardPackets) {
}
TEST(PacketBuffer, Reordering) {
- PacketBuffer buffer(100, 1000); // 100 packets, 1000 bytes.
+ PacketBuffer buffer(100); // 100 packets.
const uint16_t start_seq_no = 17;
const uint32_t start_ts = 4711;
const uint32_t ts_increment = 10;
@@ -384,7 +364,6 @@ TEST(PacketBuffer, Reordering) {
&current_pt,
&current_cng_pt));
EXPECT_EQ(10, buffer.NumPacketsInBuffer());
- EXPECT_EQ(10 * payload_len, buffer.current_memory_bytes());
// Extract them and make sure that come out in the right order.
uint32_t current_ts = start_ts;
@@ -408,18 +387,8 @@ TEST(PacketBuffer, Failures) {
int payload_len = 100;
PacketGenerator gen(start_seq_no, start_ts, 0, ts_increment);
- PacketBuffer* buffer = new PacketBuffer(0, 1000); // 0 packets, 1000 bytes.
- Packet* packet = gen.NextPacket(payload_len);
- EXPECT_EQ(PacketBuffer::kOversizePacket, buffer->InsertPacket(packet));
- delete buffer;
-
- buffer = new PacketBuffer(100, 10); // 100 packets, 10 bytes.
- packet = gen.NextPacket(payload_len);
- EXPECT_EQ(PacketBuffer::kOversizePacket, buffer->InsertPacket(packet));
- delete buffer;
-
- buffer = new PacketBuffer(100, 10000); // 100 packets, 10000 bytes.
- packet = NULL;
+ PacketBuffer* buffer = new PacketBuffer(100); // 100 packets.
+ Packet* packet = NULL;
EXPECT_EQ(PacketBuffer::kInvalidPacket, buffer->InsertPacket(packet));
packet = gen.NextPacket(payload_len);
delete [] packet->payload;
@@ -448,7 +417,7 @@ TEST(PacketBuffer, Failures) {
// Insert packet list of three packets, where the second packet has an invalid
// payload. Expect first packet to be inserted, and the remaining two to be
// discarded.
- buffer = new PacketBuffer(100, 1000); // 100 packets, 1000 bytes.
+ buffer = new PacketBuffer(100); // 100 packets.
PacketList list;
list.push_back(gen.NextPacket(payload_len)); // Valid packet.
packet = gen.NextPacket(payload_len);
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc
index 56039a57ec6..1d61ef0cf40 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.cc
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/payload_splitter.h"
+#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
#include <assert.h>
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
namespace webrtc {
@@ -119,6 +119,64 @@ int PayloadSplitter::SplitRed(PacketList* packet_list) {
return ret;
}
+int PayloadSplitter::SplitFec(PacketList* packet_list,
+ DecoderDatabase* decoder_database) {
+ PacketList::iterator it = packet_list->begin();
+ // Iterate through all packets in |packet_list|.
+ while (it != packet_list->end()) {
+ Packet* packet = (*it); // Just to make the notation more intuitive.
+ // Get codec type for this payload.
+ uint8_t payload_type = packet->header.payloadType;
+ const DecoderDatabase::DecoderInfo* info =
+ decoder_database->GetDecoderInfo(payload_type);
+ if (!info) {
+ return kUnknownPayloadType;
+ }
+ // No splitting for a sync-packet.
+ if (packet->sync_packet) {
+ ++it;
+ continue;
+ }
+
+ // Not an FEC packet.
+ AudioDecoder* decoder = decoder_database->GetDecoder(payload_type);
+ // decoder should not return NULL.
+ assert(decoder != NULL);
+ if (!decoder ||
+ !decoder->PacketHasFec(packet->payload, packet->payload_length)) {
+ ++it;
+ continue;
+ }
+
+ switch (info->codec_type) {
+ case kDecoderOpus:
+ case kDecoderOpus_2ch: {
+ Packet* new_packet = new Packet;
+
+ new_packet->header = packet->header;
+ int duration = decoder->
+ PacketDurationRedundant(packet->payload, packet->payload_length);
+ new_packet->header.timestamp -= duration;
+ new_packet->payload = new uint8_t[packet->payload_length];
+ memcpy(new_packet->payload, packet->payload, packet->payload_length);
+ new_packet->payload_length = packet->payload_length;
+ new_packet->primary = false;
+ new_packet->waiting_time = packet->waiting_time;
+ new_packet->sync_packet = packet->sync_packet;
+
+ packet_list->insert(it, new_packet);
+ break;
+ }
+ default: {
+ return kFecSplitError;
+ }
+ }
+
+ ++it;
+ }
+ return kOK;
+}
+
int PayloadSplitter::CheckRedPayloads(PacketList* packet_list,
const DecoderDatabase& decoder_database) {
PacketList::iterator it = packet_list->begin();
@@ -283,7 +341,7 @@ int PayloadSplitter::SplitAudio(PacketList* packet_list,
// increment it manually.
it = packet_list->erase(it);
}
- return 0;
+ return kOK;
}
void PayloadSplitter::SplitBySamples(const Packet* packet,
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.h
index 3768c2f2b1c..a3dd77e5a53 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter.h
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PAYLOAD_SPLITTER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PAYLOAD_SPLITTER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_PAYLOAD_SPLITTER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_PAYLOAD_SPLITTER_H_
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
namespace webrtc {
@@ -32,7 +32,8 @@ class PayloadSplitter {
kTooLargePayload = -1,
kFrameSplitError = -2,
kUnknownPayloadType = -3,
- kRedLengthMismatch = -4
+ kRedLengthMismatch = -4,
+ kFecSplitError = -5,
};
PayloadSplitter() {}
@@ -47,6 +48,12 @@ class PayloadSplitter {
// Returns kOK or an error.
virtual int SplitRed(PacketList* packet_list);
+ // Iterates through |packet_list| and, duplicate each audio payload that has
+ // FEC as new packet for redundant decoding. The decoder database is needed to
+ // get information about which payload type each packet contains.
+ virtual int SplitFec(PacketList* packet_list,
+ DecoderDatabase* decoder_database);
+
// Checks all packets in |packet_list|. Packets that are DTMF events or
// comfort noise payloads are kept. Except that, only one single payload type
// is accepted. Any packet with another payload type is discarded.
@@ -80,4 +87,4 @@ class PayloadSplitter {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PAYLOAD_SPLITTER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_PAYLOAD_SPLITTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
index 5a7a6ca3e4e..5cde1bda5e5 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/payload_splitter_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/payload_splitter_unittest.cc
@@ -10,15 +10,15 @@
// Unit tests for PayloadSplitter class.
-#include "webrtc/modules/audio_coding/neteq4/payload_splitter.h"
+#include "webrtc/modules/audio_coding/neteq/payload_splitter.h"
#include <assert.h>
#include <utility> // pair
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
using ::testing::Return;
@@ -91,6 +91,34 @@ Packet* CreateRedPayload(int num_payloads,
return packet;
}
+
+// A possible Opus packet that contains FEC is the following.
+// The frame is 20 ms in duration.
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |0|0|0|0|1|0|0|0|x|1|x|x|x|x|x|x|x| |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ |
+// | Compressed frame 1 (N-2 bytes)... :
+// : |
+// | |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+Packet* CreateOpusFecPacket(uint8_t payload_type, int payload_length,
+ uint8_t payload_value) {
+ Packet* packet = new Packet;
+ packet->header.payloadType = payload_type;
+ packet->header.timestamp = kBaseTimestamp;
+ packet->header.sequenceNumber = kSequenceNumber;
+ packet->payload_length = payload_length;
+ uint8_t* payload = new uint8_t[packet->payload_length];
+ payload[0] = 0x08;
+ payload[1] = 0x40;
+ memset(&payload[2], payload_value, payload_length - 2);
+ packet->payload = payload;
+ return packet;
+}
+
// Create a packet with all payload bytes set to |payload_value|.
Packet* CreatePacket(uint8_t payload_type, int payload_length,
uint8_t payload_value) {
@@ -691,4 +719,59 @@ TEST(IlbcPayloadSplitter, UnevenPayload) {
EXPECT_CALL(decoder_database, Die());
}
+TEST(FecPayloadSplitter, MixedPayload) {
+ PacketList packet_list;
+ DecoderDatabase decoder_database;
+
+ decoder_database.RegisterPayload(0, kDecoderOpus);
+ decoder_database.RegisterPayload(1, kDecoderPCMu);
+
+ Packet* packet = CreateOpusFecPacket(0, 10, 0xFF);
+ packet_list.push_back(packet);
+
+ packet = CreatePacket(0, 10, 0); // Non-FEC Opus payload.
+ packet_list.push_back(packet);
+
+ packet = CreatePacket(1, 10, 0); // Non-Opus payload.
+ packet_list.push_back(packet);
+
+ PayloadSplitter splitter;
+ EXPECT_EQ(PayloadSplitter::kOK,
+ splitter.SplitFec(&packet_list, &decoder_database));
+ EXPECT_EQ(4u, packet_list.size());
+
+ // Check first packet.
+ packet = packet_list.front();
+ EXPECT_EQ(0, packet->header.payloadType);
+ EXPECT_EQ(kBaseTimestamp - 20 * 32, packet->header.timestamp);
+ EXPECT_EQ(10, packet->payload_length);
+ EXPECT_FALSE(packet->primary);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+
+ // Check second packet.
+ packet = packet_list.front();
+ EXPECT_EQ(0, packet->header.payloadType);
+ EXPECT_EQ(kBaseTimestamp, packet->header.timestamp);
+ EXPECT_EQ(10, packet->payload_length);
+ EXPECT_TRUE(packet->primary);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+
+ // Check third packet.
+ packet = packet_list.front();
+ VerifyPacket(packet, 10, 0, kSequenceNumber, kBaseTimestamp, 0, true);
+ delete [] packet->payload;
+ delete packet;
+ packet_list.pop_front();
+
+ // Check fourth packet.
+ packet = packet_list.front();
+ VerifyPacket(packet, 10, 1, kSequenceNumber, kBaseTimestamp, 0, true);
+ delete [] packet->payload;
+ delete packet;
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/peak_detection.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/peak_detection.c
deleted file mode 100644
index 8c85d2a837e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/peak_detection.c
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of the peak detection used for finding correlation peaks.
- */
-
-#include "dsp_helpfunctions.h"
-
-#include "signal_processing_library.h"
-
-/* Table of constants used in parabolic fit function WebRtcNetEQ_PrblFit */
-const int16_t WebRtcNetEQ_kPrblCf[17][3] = { { 120, 32, 64 }, { 140, 44, 75 },
- { 150, 50, 80 }, { 160, 57, 85 },
- { 180, 72, 96 }, { 200, 89, 107 },
- { 210, 98, 112 }, { 220, 108, 117 },
- { 240, 128, 128 }, { 260, 150, 139 },
- { 270, 162, 144 }, { 280, 174, 149 },
- { 300, 200, 160 }, { 320, 228, 171 },
- { 330, 242, 176 }, { 340, 257, 181 },
- { 360, 288, 192 } };
-
-int16_t WebRtcNetEQ_PeakDetection(int16_t *pw16_data, int16_t w16_dataLen,
- int16_t w16_nmbPeaks, int16_t fs_mult,
- int16_t *pw16_winIndex,
- int16_t *pw16_winValue)
-{
- /* Local variables */
- int i;
- int16_t w16_tmp;
- int16_t w16_tmp2;
- int16_t indMin = 0;
- int16_t indMax = 0;
-
- /* Peak detection */
-
- for (i = 0; i <= (w16_nmbPeaks - 1); i++)
- {
- if (w16_nmbPeaks == 1)
- {
- /*
- * Single peak
- * The parabola fit assumes that an extra point is available; worst case it gets
- * a zero on the high end of the signal.
- */
- w16_dataLen++;
- }
-
- pw16_winIndex[i] = WebRtcSpl_MaxIndexW16(pw16_data, (int16_t) (w16_dataLen - 1));
-
- if (i != w16_nmbPeaks - 1)
- {
- w16_tmp = pw16_winIndex[i] - 2; /* *fs_mult; */
- indMin = WEBRTC_SPL_MAX(0, w16_tmp);
- w16_tmp = pw16_winIndex[i] + 2; /* *fs_mult; */
- w16_tmp2 = w16_dataLen - 1;
- indMax = WEBRTC_SPL_MIN(w16_tmp2, w16_tmp);
- }
-
- if ((pw16_winIndex[i] != 0) && (pw16_winIndex[i] != (w16_dataLen - 2)))
- {
- /* Parabola fit*/
- WebRtcNetEQ_PrblFit(&(pw16_data[pw16_winIndex[i] - 1]), &(pw16_winIndex[i]),
- &(pw16_winValue[i]), fs_mult);
- }
- else
- {
- if (pw16_winIndex[i] == (w16_dataLen - 2))
- {
- if (pw16_data[pw16_winIndex[i]] > pw16_data[pw16_winIndex[i] + 1])
- {
- WebRtcNetEQ_PrblFit(&(pw16_data[pw16_winIndex[i] - 1]),
- &(pw16_winIndex[i]), &(pw16_winValue[i]), fs_mult);
- }
- else if (pw16_data[pw16_winIndex[i]] <= pw16_data[pw16_winIndex[i] + 1])
- {
- pw16_winValue[i] = (pw16_data[pw16_winIndex[i]]
- + pw16_data[pw16_winIndex[i] + 1]) >> 1; /* lin approx */
- pw16_winIndex[i] = (pw16_winIndex[i] * 2 + 1) * fs_mult;
- }
- }
- else
- {
- pw16_winValue[i] = pw16_data[pw16_winIndex[i]];
- pw16_winIndex[i] = pw16_winIndex[i] * 2 * fs_mult;
- }
- }
-
- if (i != w16_nmbPeaks - 1)
- {
- WebRtcSpl_MemSetW16(&(pw16_data[indMin]), 0, (indMax - indMin + 1));
- /* for (j=indMin; j<=indMax; j++) pw16_data[j] = 0; */
- }
- }
-
- return 0;
-}
-
-int16_t WebRtcNetEQ_PrblFit(int16_t *pw16_3pts, int16_t *pw16_Ind,
- int16_t *pw16_outVal, int16_t fs_mult)
-{
- /* Variables */
- int32_t Num, Den;
- int32_t temp;
- int16_t flag, stp, strt, lmt;
- uint16_t PFind[13];
-
- if (fs_mult == 1)
- {
- PFind[0] = 0;
- PFind[1] = 8;
- PFind[2] = 16;
- }
- else if (fs_mult == 2)
- {
- PFind[0] = 0;
- PFind[1] = 4;
- PFind[2] = 8;
- PFind[3] = 12;
- PFind[4] = 16;
- }
- else if (fs_mult == 4)
- {
- PFind[0] = 0;
- PFind[1] = 2;
- PFind[2] = 4;
- PFind[3] = 6;
- PFind[4] = 8;
- PFind[5] = 10;
- PFind[6] = 12;
- PFind[7] = 14;
- PFind[8] = 16;
- }
- else
- {
- PFind[0] = 0;
- PFind[1] = 1;
- PFind[2] = 3;
- PFind[3] = 4;
- PFind[4] = 5;
- PFind[5] = 7;
- PFind[6] = 8;
- PFind[7] = 9;
- PFind[8] = 11;
- PFind[9] = 12;
- PFind[10] = 13;
- PFind[11] = 15;
- PFind[12] = 16;
- }
-
- /* Num = -3*pw16_3pts[0] + 4*pw16_3pts[1] - pw16_3pts[2]; */
- /* Den = pw16_3pts[0] - 2*pw16_3pts[1] + pw16_3pts[2]; */
- Num = WEBRTC_SPL_MUL_16_16(pw16_3pts[0],-3) + WEBRTC_SPL_MUL_16_16(pw16_3pts[1],4)
- - pw16_3pts[2];
-
- Den = pw16_3pts[0] + WEBRTC_SPL_MUL_16_16(pw16_3pts[1],-2) + pw16_3pts[2];
-
- temp = (int32_t) WEBRTC_SPL_MUL(Num, (int32_t)120); /* need 32_16 really */
- flag = 1;
- stp = WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0] - WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0];
- strt = (WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0]
- + WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0]) >> 1;
-
- if (temp < (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)strt))
- {
- lmt = strt - stp;
- while (flag)
- {
- if ((flag == fs_mult) || (temp
- > (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)lmt)))
- {
- *pw16_outVal
- = (int16_t)
- (((int32_t) ((int32_t) WEBRTC_SPL_MUL(Den,(int32_t)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][1])
- + (int32_t) WEBRTC_SPL_MUL(Num,(int32_t)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][2])
- + WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256))) >> 8);
- *pw16_Ind = (*pw16_Ind) * (fs_mult << 1) - flag;
- flag = 0;
- }
- else
- {
- flag++;
- lmt -= stp;
- }
- }
- }
- else if (temp > (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)(strt+stp)))
- {
- lmt = strt + (stp << 1);
- while (flag)
- {
- if ((flag == fs_mult) || (temp
- < (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)lmt)))
- {
- int32_t temp_term_1, temp_term_2, temp_term_3;
-
- temp_term_1 = WEBRTC_SPL_MUL(Den,
- (int32_t) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][1]);
- temp_term_2 = WEBRTC_SPL_MUL(Num,
- (int32_t) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][2]);
- temp_term_3 = WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256);
-
- *pw16_outVal
- = (int16_t) ((temp_term_1 + temp_term_2 + temp_term_3) >> 8);
-
- *pw16_Ind = (*pw16_Ind) * (fs_mult << 1) + flag;
- flag = 0;
- }
- else
- {
- flag++;
- lmt += stp;
- }
- }
-
- }
- else
- {
- *pw16_outVal = pw16_3pts[1];
- *pw16_Ind = (*pw16_Ind) * 2 * fs_mult;
- }
-
- return 0;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.cc
index c3d5c7edd71..7ae7f97abc9 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/post_decode_vad.h"
+#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.h
index eb197d9ef24..e713009c85f 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad.h
@@ -8,17 +8,17 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_POST_DECODE_VAD_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_POST_DECODE_VAD_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_POST_DECODE_VAD_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_POST_DECODE_VAD_H_
#include <string> // size_t
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_audio/vad/include/webrtc_vad.h"
#include "webrtc/common_types.h" // NULL
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/modules/audio_coding/neteq/defines.h"
+#include "webrtc/modules/audio_coding/neteq/interface/audio_decoder.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -69,4 +69,4 @@ class PostDecodeVad {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_POST_DECODE_VAD_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_POST_DECODE_VAD_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad_unittest.cc
index a4d9da8e166..ed48db858d4 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/post_decode_vad_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/post_decode_vad_unittest.cc
@@ -10,7 +10,7 @@
// Unit tests for PostDecodeVad class.
-#include "webrtc/modules/audio_coding/neteq4/post_decode_vad.h"
+#include "webrtc/modules/audio_coding/neteq/post_decode_vad.h"
#include "gtest/gtest.h"
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.c
deleted file mode 100644
index e56c0628415..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.c
+++ /dev/null
@@ -1,527 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file contains the Pre-emptive Expand algorithm that is used to increase
- * the delay by repeating a part of the audio stream.
- */
-
-#include "dsp.h"
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-
-#define PREEMPTIVE_CORR_LEN 50
-#define PREEMPTIVE_MIN_LAG 10
-#define PREEMPTIVE_MAX_LAG 60
-#define PREEMPTIVE_DOWNSAMPLED_LEN (PREEMPTIVE_CORR_LEN + PREEMPTIVE_MAX_LAG)
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_downSampSpeech 110 0 109
- int32_t pw32_corr 2*50 110 209
- int16_t pw16_corr 50 0 49
-
- Total: 110+2*50
- */
-
-#define SCRATCH_PW16_DS_SPEECH 0
-#define SCRATCH_PW32_CORR PREEMPTIVE_DOWNSAMPLED_LEN
-#define SCRATCH_PW16_CORR 0
-
-/****************************************************************************
- * WebRtcNetEQ_PreEmptiveExpand(...)
- *
- * This function tries to extend the audio data by repeating one or several
- * pitch periods. The operation is only carried out if the correlation is
- * strong or if the signal energy is very low. The algorithm is the
- * reciprocal of the Accelerate algorithm.
- *
- * Input:
- * - inst : NetEQ DSP instance
- * - scratchPtr : Pointer to scratch vector.
- * - decoded : Pointer to newly decoded speech.
- * - len : Length of decoded speech.
- * - oldDataLen : Length of the part of decoded that has already been played out.
- * - BGNonly : If non-zero, Pre-emptive Expand will only copy
- * the first DEFAULT_TIME_ADJUST seconds of the
- * input and append to the end. No signal matching is
- * done.
- *
- * Output:
- * - inst : Updated instance
- * - outData : Pointer to a memory space where the output data
- * should be stored. The vector must be at least
- * min(len + 120*fs/8000, NETEQ_MAX_OUTPUT_SIZE)
- * elements long.
- * - pw16_len : Number of samples written to outData.
- *
- * Return value : 0 - Ok
- * <0 - Error
- */
-
-int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
-#ifdef SCRATCH
- int16_t *pw16_scratchPtr,
-#endif
- const int16_t *pw16_decoded, int len, int oldDataLen,
- int16_t *pw16_outData, int16_t *pw16_len,
- int16_t BGNonly)
-{
-
-#ifdef SCRATCH
- /* Use scratch memory for internal temporary vectors */
- int16_t *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
- int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
- int16_t *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
-#else
- /* Allocate memory for temporary vectors */
- int16_t pw16_downSampSpeech[PREEMPTIVE_DOWNSAMPLED_LEN];
- int32_t pw32_corr[PREEMPTIVE_CORR_LEN];
- int16_t pw16_corr[PREEMPTIVE_CORR_LEN];
-#endif
- int16_t w16_decodedMax = 0;
- int16_t w16_tmp = 0;
- int16_t w16_tmp2;
- int32_t w32_tmp;
- int32_t w32_tmp2;
-
- const int16_t w16_startLag = PREEMPTIVE_MIN_LAG;
- const int16_t w16_endLag = PREEMPTIVE_MAX_LAG;
- const int16_t w16_corrLen = PREEMPTIVE_CORR_LEN;
- const int16_t *pw16_vec1, *pw16_vec2;
- int16_t *pw16_vectmp;
- int16_t w16_inc, w16_startfact;
- int16_t w16_bestIndex, w16_bestVal;
- int16_t w16_VAD = 1;
- int16_t fsMult;
- int16_t fsMult120;
- int32_t w32_en1, w32_en2, w32_cc;
- int16_t w16_en1, w16_en2;
- int16_t w16_en1Scale, w16_en2Scale;
- int16_t w16_sqrtEn1En2;
- int16_t w16_bestCorr = 0;
- int ok;
-
-#ifdef NETEQ_STEREO
- MasterSlaveInfo *msInfo = inst->msInfo;
-#endif
-
- fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */
-
- /* Pre-calculate common multiplication with fsMult */
- fsMult120 = (int16_t) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
-
- inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
-
- /*
- * Sanity check for len variable; must be (almost) 30 ms (120*fsMult + max(bestIndex)).
- * Also, the new part must be at least .625 ms (w16_overlap).
- */
- if (len < (int16_t) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult) || oldDataLen >= len
- - inst->ExpandInst.w16_overlap)
- {
- /* Length of decoded data too short */
- inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
- *pw16_len = len;
-
-
- /* simply move all data from decoded to outData */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return NETEQ_OTHER_ERROR;
- }
-
- /***********************************/
- /* Special operations for BGN only */
- /***********************************/
-
- /* Check if "background noise only" flag is set */
- if (BGNonly)
- {
- /* special operation for BGN only; simply insert a chunk of data */
- w16_bestIndex = DEFAULT_TIME_ADJUST * (fsMult << 3); /* X*fs/1000 */
-
- /* Sanity check for bestIndex */
- if (w16_bestIndex > len)
- { /* not good, do nothing instead */
- inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
- *pw16_len = len;
-
-
- /* simply move all data from decoded to outData */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return NETEQ_OTHER_ERROR;
- }
-
- /* set length parameter */
- *pw16_len = len + w16_bestIndex;
-
-
- /* copy to output */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, len);
- WEBRTC_SPL_MEMCPY_W16(&pw16_outData[len], pw16_decoded, w16_bestIndex);
-
- /* set mode */
- inst->w16_mode = MODE_LOWEN_PREEMPTIVE;
-
- /* update statistics */
- inst->statInst.preemptiveLength += w16_bestIndex;
- /* Short-term activity statistics. */
- inst->activity_stats.preemptive_expand_bgn_samples += w16_bestIndex;
-
- return 0;
- } /* end of special code for BGN mode */
-
-#ifdef NETEQ_STEREO
-
- /* Sanity for msInfo */
- if (msInfo == NULL)
- {
- /* this should not happen here */
- return MASTER_SLAVE_ERROR;
- }
-
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
- /* Find correlation lag only for non-slave instances */
-
-#endif
-
- /****************************************************************/
- /* Find the strongest correlation lag by downsampling to 4 kHz, */
- /* calculating correlation for downsampled signal and finding */
- /* the strongest correlation peak. */
- /****************************************************************/
-
- /* find maximum absolute value */
- w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
-
- /* downsample the decoded speech to 4 kHz */
- ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
- PREEMPTIVE_DOWNSAMPLED_LEN, 1 /* compensate delay*/);
- if (ok != 0)
- {
- /* error */
- inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
- *pw16_len = len;
-
-
- /* simply move all data from decoded to outData */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return NETEQ_OTHER_ERROR;
- }
-
- /*
- * Set scaling factor for cross correlation to protect against
- * overflow (log2(50) => 6)
- */
- w16_tmp = 6 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- /* Perform correlation from lag 10 to lag 60 in 4 kHz domain */WebRtcNetEQ_CrossCorr(
- pw32_corr, &pw16_downSampSpeech[w16_endLag],
- &pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
- (int16_t) (w16_endLag - w16_startLag), w16_tmp, -1);
-
- /* Normalize correlation to 14 bits and put in a int16_t vector */
- w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
- w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_corrLen, pw32_corr, w16_tmp);
-
- /* Find limits for peak finding, in order to avoid overful NetEQ algorithm buffer. */
- /* Calculate difference between MAX_OUTPUT_SIZE and len in 4 kHz domain. */
- w16_tmp = WebRtcSpl_DivW32W16ResW16((int32_t) (NETEQ_MAX_OUTPUT_SIZE - len),
- (int16_t) (fsMult << 1)) - w16_startLag;
- w16_tmp = WEBRTC_SPL_MIN(w16_corrLen, w16_tmp); /* no more than corrLen = 50 */
-
-#ifdef NETEQ_STEREO
- } /* end if (msInfo->msMode != NETEQ_SLAVE) */
-
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
- /* Find the strongest correlation peak by using the parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corr, w16_tmp, 1, fsMult, &w16_bestIndex, &w16_bestVal);
- /* 0 <= bestIndex <= (2*w16_tmp - 1)*fsMult <= (2*corrLen - 1)*fsMult = 99*fsMult */
-
- /* Compensate bestIndex for displaced starting position */
- w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
- /* 20*fsMult <= bestIndex <= 119*fsMult */
-
- msInfo->bestIndex = w16_bestIndex;
- }
- else if (msInfo->msMode == NETEQ_SLAVE)
- {
- if (msInfo->extraInfo == PE_EXP_FAIL)
- {
- /* Master has signaled an unsuccessful preemptive expand */
- w16_bestIndex = 0;
- }
- else
- {
- /* Get best index from master */
- w16_bestIndex = msInfo->bestIndex;
- }
- }
- else
- {
- /* Invalid mode */
- return (MASTER_SLAVE_ERROR);
- }
-
-#else /* NETEQ_STEREO */
-
- /* Find the strongest correlation peak by using the parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corr, w16_tmp, 1, fsMult, &w16_bestIndex, &w16_bestVal);
- /* 0 <= bestIndex <= (2*w16_tmp - 1)*fsMult <= (2*corrLen - 1)*fsMult = 99*fsMult */
-
- /* Compensate bestIndex for displaced starting position */
- w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
- /* 20*fsMult <= bestIndex <= 119*fsMult */
-
-#endif /* NETEQ_STEREO */
-
-#ifdef NETEQ_STEREO
-
- if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
- {
- /* Calculate correlation only for non-slave instances */
-
-#endif /* NETEQ_STEREO */
-
- /*****************************************************/
- /* Calculate correlation bestCorr for the found lag. */
- /* Also do a simple VAD decision. */
- /*****************************************************/
-
- /*
- * Calculate scaling to ensure that bestIndex samples can be square-summed
- * without overflowing
- */
- w16_tmp = (31
- - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax)));
- w16_tmp += (31 - WebRtcSpl_NormW32(w16_bestIndex));
- w16_tmp -= 31;
- w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
-
- /* vec1 starts at 15 ms minus one pitch period */
- pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
- /* vec2 start at 15 ms */
- pw16_vec2 = &pw16_decoded[fsMult120];
-
- /* Calculate energies for vec1 and vec2 */
- w32_en1 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1,
- (int16_t*) pw16_vec1, w16_bestIndex, w16_tmp);
- w32_en2 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec2,
- (int16_t*) pw16_vec2, w16_bestIndex, w16_tmp);
-
- /* Calculate cross-correlation at the found lag */
- w32_cc = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1, (int16_t*) pw16_vec2,
- w16_bestIndex, w16_tmp);
-
- /* Check VAD constraint
- ((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
- w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_en1 + w32_en2, 4); /* (en1+en2)/(2*8) */
- if (inst->BGNInst.w16_initialized == 1)
- {
- w32_tmp2 = inst->BGNInst.w32_energy;
- }
- else
- {
- /* if BGN parameters have not been estimated, use a fixed threshold */
- w32_tmp2 = 75000;
- }
- w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
- w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
- w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
- w16_tmp2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
- w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);
-
- /* Scale w32_tmp properly before comparing with w32_tmp2 */
- /* (w16_tmp is scaling before energy calculation, thus 2*w16_tmp) */
- if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
- {
- /* Cannot scale only w32_tmp, must scale w32_temp2 too */
- int16_t tempshift = WebRtcSpl_NormW32(w32_tmp);
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
- w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
- WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
- }
- else
- {
- w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp,
- WEBRTC_SPL_LSHIFT_W32(w16_tmp,1));
- }
-
- if (w32_tmp <= w32_tmp2) /*((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
- {
- /* The signal seems to be passive speech */
- w16_VAD = 0;
- w16_bestCorr = 0; /* Correlation does not matter */
-
- /* For low energy expansion, the new data can be less than 15 ms,
- but we must ensure that bestIndex is not larger than the new data. */
- w16_bestIndex = WEBRTC_SPL_MIN( w16_bestIndex, len - oldDataLen );
- }
- else
- {
- /* The signal is active speech */
- w16_VAD = 1;
-
- /* Calculate correlation (cc/sqrt(en1*en2)) */
-
- /* Start with calculating scale values */
- w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
- w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
- w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
- w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
-
- /* Make sure total scaling is even (to simplify scale factor after sqrt) */
- if ((w16_en1Scale + w16_en2Scale) & 1)
- {
- w16_en1Scale += 1;
- }
-
- /* Convert energies to int16_t */
- w16_en1 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
- w16_en2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
-
- /* Calculate energy product */
- w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
-
- /* Calculate square-root of energy product */
- w16_sqrtEn1En2 = (int16_t) WebRtcSpl_SqrtFloor(w32_tmp);
-
- /* Calculate cc/sqrt(en1*en2) in Q14 */
- w16_tmp = 14 - ((w16_en1Scale + w16_en2Scale) >> 1);
- w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
- w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
- w16_bestCorr = (int16_t) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
- w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
- }
-
-#ifdef NETEQ_STEREO
-
- } /* end if (msInfo->msMode != NETEQ_SLAVE) */
-
-#endif /* NETEQ_STEREO */
-
- /*******************************************************/
- /* Check preemptive expand criteria and insert samples */
- /*******************************************************/
-
- /* Check for strong correlation (>0.9) and at least 15 ms new data,
- or passive speech */
-#ifdef NETEQ_STEREO
- if (((((w16_bestCorr > 14746) && (oldDataLen <= fsMult120)) || (w16_VAD == 0))
- && (msInfo->msMode != NETEQ_SLAVE)) || ((msInfo->msMode == NETEQ_SLAVE)
- && (msInfo->extraInfo != PE_EXP_FAIL)))
-#else
- if (((w16_bestCorr > 14746) && (oldDataLen <= fsMult120))
- || (w16_VAD == 0))
-#endif
- {
- /* Do expand operation by overlap add */
-
- /* Set length of the first part, not to be modified */
- int16_t w16_startIndex = WEBRTC_SPL_MAX(oldDataLen, fsMult120);
-
- /*
- * Calculate cross-fading slope so that the fading factor goes from
- * 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
- */
- w16_inc = (int16_t) WebRtcSpl_DivW32W16((int32_t) 16384,
- (int16_t) (w16_bestIndex + 1)); /* in Q14 */
-
- /* Initiate fading factor */
- w16_startfact = 16384 - w16_inc;
-
- /* vec1 starts at 15 ms minus one pitch period */
- pw16_vec1 = &pw16_decoded[w16_startIndex - w16_bestIndex];
- /* vec2 start at 15 ms */
- pw16_vec2 = &pw16_decoded[w16_startIndex];
-
-
- /* Copy unmodified part [0 to 15 ms] */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, w16_startIndex);
-
- /* Generate interpolated part of length bestIndex (1 pitch period) */
- pw16_vectmp = pw16_outData + w16_startIndex;
- /* Reuse mixing function from Expand */
- WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (int16_t*) pw16_vec2,
- (int16_t*) pw16_vec1, &w16_startfact, w16_inc, w16_bestIndex);
-
- /* Move the last part (also unmodified) */
- /* Take from decoded at 15 ms */
- pw16_vec2 = &pw16_decoded[w16_startIndex];
- WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[w16_startIndex + w16_bestIndex], pw16_vec2,
- (int16_t) (len - w16_startIndex));
-
- /* Set the mode flag */
- if (w16_VAD)
- {
- inst->w16_mode = MODE_SUCCESS_PREEMPTIVE;
- }
- else
- {
- inst->w16_mode = MODE_LOWEN_PREEMPTIVE;
- }
-
- /* Calculate resulting length = original length + pitch period */
- *pw16_len = len + w16_bestIndex;
-
- /* Update in-call statistics */
- inst->statInst.preemptiveLength += w16_bestIndex;
- /* Short-term activity statistics. */
- inst->activity_stats.preemptive_expand_normal_samples += w16_bestIndex;
- return 0;
- }
- else
- {
- /* Preemptive Expand not allowed */
-
-#ifdef NETEQ_STEREO
- /* Signal to slave(s) that this was unsuccessful */
- if (msInfo->msMode == NETEQ_MASTER)
- {
- msInfo->extraInfo = PE_EXP_FAIL;
- }
-#endif
-
- /* Set mode flag to unsuccessful preemptive expand */
- inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
-
- /* Length is unmodified */
- *pw16_len = len;
-
-
- /* Simply move all data from decoded to outData */
-
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
-
- return 0;
- }
-}
-
-#undef SCRATCH_PW16_DS_SPEECH
-#undef SCRATCH_PW32_CORR
-#undef SCRATCH_PW16_CORR
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.cc
index ac787eb1d66..b2dc3e60cba 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/preemptive_expand.h"
+#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
#include <algorithm> // min, max
@@ -98,4 +98,13 @@ PreemptiveExpand::ReturnCodes PreemptiveExpand::CheckCriteriaAndStretch(
}
}
+PreemptiveExpand* PreemptiveExpandFactory::Create(
+ int sample_rate_hz,
+ size_t num_channels,
+ const BackgroundNoise& background_noise,
+ int overlap_samples) const {
+ return new PreemptiveExpand(
+ sample_rate_hz, num_channels, background_noise, overlap_samples);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.h
index 4cd92cc0bb6..1aa61330145 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/preemptive_expand.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/preemptive_expand.h
@@ -8,14 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PREEMPTIVE_EXPAND_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PREEMPTIVE_EXPAND_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_PREEMPTIVE_EXPAND_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_PREEMPTIVE_EXPAND_H_
#include <assert.h>
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/modules/audio_coding/neteq4/time_stretch.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
+#include "webrtc/modules/audio_coding/neteq/time_stretch.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -29,11 +29,13 @@ class BackgroundNoise;
// PreemptiveExpand are implemented.
class PreemptiveExpand : public TimeStretch {
public:
- PreemptiveExpand(int sample_rate_hz, size_t num_channels,
- const BackgroundNoise& background_noise)
+ PreemptiveExpand(int sample_rate_hz,
+ size_t num_channels,
+ const BackgroundNoise& background_noise,
+ int overlap_samples)
: TimeStretch(sample_rate_hz, num_channels, background_noise),
old_data_length_per_channel_(-1),
- overlap_samples_(5 * sample_rate_hz / 8000) {
+ overlap_samples_(overlap_samples) {
}
virtual ~PreemptiveExpand() {}
@@ -70,5 +72,16 @@ class PreemptiveExpand : public TimeStretch {
DISALLOW_COPY_AND_ASSIGN(PreemptiveExpand);
};
+struct PreemptiveExpandFactory {
+ PreemptiveExpandFactory() {}
+ virtual ~PreemptiveExpandFactory() {}
+
+ virtual PreemptiveExpand* Create(
+ int sample_rate_hz,
+ size_t num_channels,
+ const BackgroundNoise& background_noise,
+ int overlap_samples) const;
+};
+
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PREEMPTIVE_EXPAND_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_PREEMPTIVE_EXPAND_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.c
deleted file mode 100644
index c168ab5437f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.c
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This function generates a pseudo-random vector.
- */
-
-#include "dsp_helpfunctions.h"
-
-/*
- * Values are normalized so that
- * sqrt(dot(pw16_NETEQFIX_RANDN_TBL,pw16_NETEQFIX_RANDN_TBL)/256)=2^13
- */
-const int16_t WebRtcNetEQ_kRandnTbl[RANDVEC_NO_OF_SAMPLES] =
-{
- 2680, 5532, 441, 5520, 16170, -5146, -1024, -8733, 3115, 9598, -10380, -4959, -1280, -21716, 7133, -1522,
- 13458, -3902, 2789, -675, 3441, 5016, -13599, -4003, -2739, 3922, -7209, 13352, -11617, -7241, 12905, -2314,
- 5426, 10121, -9702, 11207, -13542, 1373, 816, -5934, -12504, 4798, 1811, 4112, -613, 201, -10367, -2960,
- -2419, 3442, 4299, -6116, -6092, 1552, -1650, -480, -1237, 18720, -11858, -8303, -8212, 865, -2890, -16968,
- 12052, -5845, -5912, 9777, -5665, -6294, 5426, -4737, -6335, 1652, 761, 3832, 641, -8552, -9084, -5753,
- 8146, 12156, -4915, 15086, -1231, -1869, 11749, -9319, -6403, 11407, 6232, -1683, 24340, -11166, 4017, -10448,
- 3153, -2936, 6212, 2891, -866, -404, -4807, -2324, -1917, -2388, -6470, -3895, -10300, 5323, -5403, 2205,
- 4640, 7022, -21186, -6244, -882, -10031, -3395, -12885, 7155, -5339, 5079, -2645, -9515, 6622, 14651, 15852,
- 359, 122, 8246, -3502, -6696, -3679, -13535, -1409, -704, -7403, -4007, 1798, 279, -420, -12796, -14219,
- 1141, 3359, 11434, 7049, -6684, -7473, 14283, -4115, -9123, -8969, 4152, 4117, 13792, 5742, 16168, 8661,
- -1609, -6095, 1881, 14380, -5588, 6758, -6425, -22969, -7269, 7031, 1119, -1611, -5850, -11281, 3559, -8952,
- -10146, -4667, -16251, -1538, 2062, -1012, -13073, 227, -3142, -5265, 20, 5770, -7559, 4740, -4819, 992,
- -8208, -7130, -4652, 6725, 7369, -1036, 13144, -1588, -5304, -2344, -449, -5705, -8894, 5205, -17904, -11188,
- -1022, 4852, 10101, -5255, -4200, -752, 7941, -1543, 5959, 14719, 13346, 17045, -15605, -1678, -1600, -9230,
- 68, 23348, 1172, 7750, 11212, -18227, 9956, 4161, 883, 3947, 4341, 1014, -4889, -2603, 1246, -5630,
- -3596, -870, -1298, 2784, -3317, -6612, -20541, 4166, 4181, -8625, 3562, 12890, 4761, 3205, -12259, -8579
-};
-
-
-void WebRtcNetEQ_RandomVec(uint32_t *w32_seed, int16_t *pw16_randVec,
- int16_t w16_len, int16_t w16_incval)
-{
- int i;
- int16_t w16_pos;
- for (i = 0; i < w16_len; i++)
- {
- *w32_seed = (*w32_seed) + w16_incval;
- w16_pos = (int16_t) ((*w32_seed) & (RANDVEC_NO_OF_SAMPLES - 1));
- pw16_randVec[i] = WebRtcNetEQ_kRandnTbl[w16_pos];
- }
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.cc
index 823909f1352..b12f2171553 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
namespace webrtc {
@@ -54,4 +54,4 @@ void RandomVector::IncreaseSeedIncrement(int16_t increase_by) {
seed_increment_+= increase_by;
seed_increment_ &= kRandomTableSize - 1;
}
-}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.h
index 64cfe0d9dd0..767dc48eee3 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector.h
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RANDOM_VECTOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RANDOM_VECTOR_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_RANDOM_VECTOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_RANDOM_VECTOR_H_
#include <string.h> // size_t
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -47,4 +47,4 @@ class RandomVector {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RANDOM_VECTOR_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_RANDOM_VECTOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector_unittest.cc
index 83193e2a7bb..cbdcdf7c829 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/random_vector_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/random_vector_unittest.cc
@@ -10,7 +10,7 @@
// Unit tests for RandomVector class.
-#include "webrtc/modules/audio_coding/neteq4/random_vector.h"
+#include "webrtc/modules/audio_coding/neteq/random_vector.h"
#include "gtest/gtest.h"
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/recin.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/recin.c
deleted file mode 100644
index 17bea5f5bbf..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/recin.c
+++ /dev/null
@@ -1,531 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of the RecIn function, which is the main function for inserting RTP
- * packets into NetEQ.
- */
-
-#include "mcu.h"
-
-#include <string.h>
-
-#include "automode.h"
-#include "dtmf_buffer.h"
-#include "mcu_dsp_common.h"
-#include "neteq_defines.h"
-#include "neteq_error_codes.h"
-#include "signal_processing_library.h"
-
-int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacketInput,
- uint32_t uw32_timeRec)
-{
- RTPPacket_t RTPpacket[2];
- int i_k;
- int i_ok = 0, i_No_Of_Payloads = 1;
- int16_t flushed = 0;
- int16_t codecPos;
- int curr_Codec;
- int16_t isREDPayload = 0;
- int32_t temp_bufsize;
- int is_sync_rtp = MCU_inst->av_sync && WebRtcNetEQ_IsSyncPayload(
- RTPpacketInput->payload, RTPpacketInput->payloadLen);
-#ifdef NETEQ_RED_CODEC
- RTPPacket_t* RTPpacketPtr[2]; /* Support for redundancy up to 2 payloads */
- RTPpacketPtr[0] = &RTPpacket[0];
- RTPpacketPtr[1] = &RTPpacket[1];
-#endif
-
- temp_bufsize = WebRtcNetEQ_PacketBufferGetSize(&MCU_inst->PacketBuffer_inst,
- &MCU_inst->codec_DB_inst,
- MCU_inst->av_sync);
- /*
- * Copy from input RTP packet to local copy
- * (mainly to enable multiple payloads using RED)
- */
-
- WEBRTC_SPL_MEMCPY_W8(&RTPpacket[0], RTPpacketInput, sizeof(RTPPacket_t));
-
- /* Reinitialize NetEq if it's needed (changed SSRC or first call) */
-
- if ((RTPpacket[0].ssrc != MCU_inst->ssrc) || (MCU_inst->first_packet == 1))
- {
- WebRtcNetEQ_RTCPInit(&MCU_inst->RTCP_inst, RTPpacket[0].seqNumber);
- MCU_inst->first_packet = 0;
-
- /* Flush the buffer */
- WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
-
- /* Store new SSRC */
- MCU_inst->ssrc = RTPpacket[0].ssrc;
-
- /* Update codecs */
- MCU_inst->timeStamp = RTPpacket[0].timeStamp;
- MCU_inst->current_Payload = RTPpacket[0].payloadType;
-
- /*Set MCU to update codec on next SignalMCU call */
- MCU_inst->new_codec = 1;
-
- /* Reset timestamp scaling */
- MCU_inst->TSscalingInitialized = 0;
-
- }
-
- if (!is_sync_rtp) { /* Update only if it not sync packet. */
- /* Call RTCP statistics if it is not sync packet. */
- i_ok |= WebRtcNetEQ_RTCPUpdate(&(MCU_inst->RTCP_inst),
- RTPpacket[0].seqNumber,
- RTPpacket[0].timeStamp, uw32_timeRec);
- }
-
- /* If Redundancy is supported and this is the redundancy payload, separate the payloads */
-#ifdef NETEQ_RED_CODEC
- if (RTPpacket[0].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
- kDecoderRED))
- {
- if (is_sync_rtp)
- {
- /* Sync packet should not have RED payload type. */
- return RECIN_SYNC_RTP_NOT_ACCEPTABLE;
- }
-
- /* Split the payload into a main and a redundancy payloads */
- i_ok = WebRtcNetEQ_RedundancySplit(RTPpacketPtr, 2, &i_No_Of_Payloads);
- if (i_ok < 0)
- {
- /* error returned */
- return i_ok;
- }
-
- /*
- * Only accept a few redundancies of the same type as the main data,
- * AVT events and CNG.
- */
- if ((i_No_Of_Payloads > 1) && (RTPpacket[0].payloadType != RTPpacket[1].payloadType)
- && (RTPpacket[0].payloadType != WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
- kDecoderAVT)) && (RTPpacket[1].payloadType != WebRtcNetEQ_DbGetPayload(
- &MCU_inst->codec_DB_inst, kDecoderAVT)) && (!WebRtcNetEQ_DbIsCNGPayload(
- &MCU_inst->codec_DB_inst, RTPpacket[0].payloadType))
- && (!WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst, RTPpacket[1].payloadType)))
- {
- i_No_Of_Payloads = 1;
- }
- isREDPayload = 1;
- }
-#endif
-
- /* loop over the number of payloads */
- for (i_k = 0; i_k < i_No_Of_Payloads; i_k++)
- {
-
- if (isREDPayload == 1)
- {
- RTPpacket[i_k].rcuPlCntr = i_k;
- }
- else
- {
- RTPpacket[i_k].rcuPlCntr = 0;
- }
-
- /* Force update of SplitInfo if it's iLBC because of potential change between 20/30ms */
- if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
- kDecoderILBC) && !is_sync_rtp) /* Don't update if sync RTP. */
- {
- i_ok = WebRtcNetEQ_DbGetSplitInfo(
- &MCU_inst->PayloadSplit_inst,
- (enum WebRtcNetEQDecoder) WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
- RTPpacket[i_k].payloadType), RTPpacket[i_k].payloadLen);
- if (i_ok < 0)
- {
- /* error returned */
- return i_ok;
- }
- }
-
- /* Get information about timestamp scaling for this payload type */
- i_ok = WebRtcNetEQ_GetTimestampScaling(MCU_inst, RTPpacket[i_k].payloadType);
- if (i_ok < 0)
- {
- /* error returned */
- return i_ok;
- }
-
- if (MCU_inst->TSscalingInitialized == 0 && MCU_inst->scalingFactor != kTSnoScaling)
- {
- /* Must initialize scaling with current timestamps */
- MCU_inst->externalTS = RTPpacket[i_k].timeStamp;
- MCU_inst->internalTS = RTPpacket[i_k].timeStamp;
- MCU_inst->TSscalingInitialized = 1;
- }
-
- /* Adjust timestamp if timestamp scaling is needed (e.g. SILK or G.722) */
- if (MCU_inst->TSscalingInitialized == 1)
- {
- uint32_t newTS = WebRtcNetEQ_ScaleTimestampExternalToInternal(MCU_inst,
- RTPpacket[i_k].timeStamp);
-
- /* save the incoming timestamp for next time */
- MCU_inst->externalTS = RTPpacket[i_k].timeStamp;
-
- /* add the scaled difference to last scaled timestamp and save ... */
- MCU_inst->internalTS = newTS;
-
- RTPpacket[i_k].timeStamp = newTS;
- }
-
- /* Is this a DTMF packet?*/
- if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
- kDecoderAVT))
- {
- if (is_sync_rtp)
- {
- /* Sync RTP should not have AVT payload type. */
- return RECIN_SYNC_RTP_NOT_ACCEPTABLE;
- }
-
-#ifdef NETEQ_ATEVENT_DECODE
- if (MCU_inst->AVT_PlayoutOn)
- {
- i_ok = WebRtcNetEQ_DtmfInsertEvent(&MCU_inst->DTMF_inst,
- RTPpacket[i_k].payload, RTPpacket[i_k].payloadLen,
- RTPpacket[i_k].timeStamp);
- if (i_ok != 0)
- {
- return i_ok;
- }
- }
-#endif
-#ifdef NETEQ_STEREO
- if (MCU_inst->usingStereo == 0)
- {
- /* do not set this for DTMF packets when using stereo mode */
- MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
- }
-#else
- MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
-#endif
- }
- else if (WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst,
- RTPpacket[i_k].payloadType))
- {
- /* Is this a CNG packet? how should we handle this?*/
-#ifdef NETEQ_CNG_CODEC
- /* Get CNG sample rate */
- uint16_t fsCng = WebRtcNetEQ_DbGetSampleRate(&MCU_inst->codec_DB_inst,
- RTPpacket[i_k].payloadType);
- if (is_sync_rtp)
- {
- /* Sync RTP should not have CNG payload type. */
- return RECIN_SYNC_RTP_NOT_ACCEPTABLE;
- }
-
- /* Force sampling frequency to 32000 Hz CNG 48000 Hz. */
- /* TODO(tlegrand): remove limitation once ACM has full 48 kHz
- * support. */
- if (fsCng > 32000) {
- fsCng = 32000;
- }
- if ((fsCng != MCU_inst->fs) && (fsCng > 8000))
- {
- /*
- * We have received CNG with a different sample rate from what we are using
- * now (must be > 8000, since we may use only one CNG type (default) for all
- * frequencies). Flush buffer and signal new codec.
- */
- WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
- MCU_inst->new_codec = 1;
- MCU_inst->current_Codec = -1;
- }
- i_ok = WebRtcNetEQ_PacketBufferInsert(&MCU_inst->PacketBuffer_inst,
- &RTPpacket[i_k], &flushed, MCU_inst->av_sync);
- if (i_ok < 0)
- {
- return RECIN_CNG_ERROR;
- }
- MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
-#else /* NETEQ_CNG_CODEC not defined */
- return RECIN_UNKNOWNPAYLOAD;
-#endif /* NETEQ_CNG_CODEC */
- }
- else
- {
- /* Reinitialize the splitting if the payload and/or the payload length has changed */
- curr_Codec = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
- RTPpacket[i_k].payloadType);
- if (curr_Codec != MCU_inst->current_Codec)
- {
- if (curr_Codec < 0)
- {
- return RECIN_UNKNOWNPAYLOAD;
- }
- if (is_sync_rtp)
- {
- /* Sync RTP should not cause codec change. */
- return RECIN_SYNC_RTP_CHANGED_CODEC;
- }
- MCU_inst->current_Codec = curr_Codec;
- MCU_inst->current_Payload = RTPpacket[i_k].payloadType;
- i_ok = WebRtcNetEQ_DbGetSplitInfo(&MCU_inst->PayloadSplit_inst,
- (enum WebRtcNetEQDecoder) MCU_inst->current_Codec,
- RTPpacket[i_k].payloadLen);
- if (i_ok < 0)
- { /* error returned */
- return i_ok;
- }
- WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
- MCU_inst->new_codec = 1;
- }
-
- /* Parse the payload and insert it into the buffer */
- i_ok = WebRtcNetEQ_SplitAndInsertPayload(&RTPpacket[i_k],
- &MCU_inst->PacketBuffer_inst, &MCU_inst->PayloadSplit_inst,
- &flushed, MCU_inst->av_sync);
- if (i_ok < 0)
- {
- return i_ok;
- }
- if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF != 0)
- {
- /* first normal packet after CNG or DTMF */
- MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = -1;
- }
- }
- /* Reset DSP timestamp etc. if packet buffer flushed */
- if (flushed)
- {
- MCU_inst->new_codec = 1;
- }
- }
-
- /*
- * If not sync RTP, update Bandwidth Estimate.
- * Only send the main payload to BWE.
- */
- if (!is_sync_rtp &&
- (curr_Codec = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
- RTPpacket[0].payloadType)) >= 0)
- {
- codecPos = MCU_inst->codec_DB_inst.position[curr_Codec];
- if (MCU_inst->codec_DB_inst.funcUpdBWEst[codecPos] != NULL) /* codec has BWE function */
- {
- if (RTPpacket[0].starts_byte1) /* check for shifted byte alignment */
- {
- /* re-align to 16-bit alignment */
- for (i_k = 0; i_k < RTPpacket[0].payloadLen; i_k++)
- {
- WEBRTC_SPL_SET_BYTE(RTPpacket[0].payload,
- WEBRTC_SPL_GET_BYTE(RTPpacket[0].payload, i_k+1),
- i_k);
- }
- RTPpacket[0].starts_byte1 = 0;
- }
-
- MCU_inst->codec_DB_inst.funcUpdBWEst[codecPos](
- MCU_inst->codec_DB_inst.codec_state[codecPos],
- (const uint16_t *) RTPpacket[0].payload,
- (int32_t) RTPpacket[0].payloadLen, RTPpacket[0].seqNumber,
- (uint32_t) RTPpacket[0].timeStamp, (uint32_t) uw32_timeRec);
- }
- }
-
- if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF == 0)
- {
- /* Calculate the total speech length carried in each packet */
- temp_bufsize = WebRtcNetEQ_PacketBufferGetSize(
- &MCU_inst->PacketBuffer_inst, &MCU_inst->codec_DB_inst,
- MCU_inst->av_sync) - temp_bufsize;
-
- if ((temp_bufsize > 0) && (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF
- == 0) && (temp_bufsize
- != MCU_inst->BufferStat_inst.Automode_inst.packetSpeechLenSamp))
- {
- /* Change the auto-mode parameters if packet length has changed */
- WebRtcNetEQ_SetPacketSpeechLen(&(MCU_inst->BufferStat_inst.Automode_inst),
- (int16_t) temp_bufsize, MCU_inst->fs);
- }
-
- /* update statistics */
- if ((int32_t) (RTPpacket[0].timeStamp - MCU_inst->timeStamp) >= 0
- && !MCU_inst->new_codec)
- {
- /*
- * Only update statistics if incoming packet is not older than last played out
- * packet, and if new codec flag is not set.
- */
- WebRtcNetEQ_UpdateIatStatistics(&MCU_inst->BufferStat_inst.Automode_inst,
- MCU_inst->PacketBuffer_inst.maxInsertPositions, RTPpacket[0].seqNumber,
- RTPpacket[0].timeStamp, MCU_inst->fs,
- WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) MCU_inst->current_Codec),
- (MCU_inst->NetEqPlayoutMode == kPlayoutStreaming));
- }
- }
- else if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF == -1)
- {
- /*
- * This is first "normal" packet after CNG or DTMF.
- * Reset packet time counter and measure time until next packet,
- * but don't update statistics.
- */
- MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 0;
- MCU_inst->BufferStat_inst.Automode_inst.packetIatCountSamp = 0;
- }
- return 0;
-
-}
-
-int WebRtcNetEQ_GetTimestampScaling(MCUInst_t *MCU_inst, int rtpPayloadType)
-{
- enum WebRtcNetEQDecoder codec;
- int codecNumber;
-
- codecNumber = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst, rtpPayloadType);
- if (codecNumber < 0)
- {
- /* error */
- return codecNumber;
- }
-
- /* cast to enumerator */
- codec = (enum WebRtcNetEQDecoder) codecNumber;
-
- /*
- * The factor obtained below is the number with which the RTP timestamp must be
- * multiplied to get the true sample count.
- */
- switch (codec)
- {
- case kDecoderG722:
- case kDecoderG722_2ch:
- {
- /* Use timestamp scaling with factor 2 (two output samples per RTP timestamp) */
- MCU_inst->scalingFactor = kTSscalingTwo;
- break;
- }
- case kDecoderISACfb:
- case kDecoderOpus:
- {
- /* We resample Opus internally to 32 kHz, and isac-fb decodes at
- * 32 kHz, but timestamps are counted at 48 kHz. So there are two
- * output samples per three RTP timestamp ticks. */
- MCU_inst->scalingFactor = kTSscalingTwoThirds;
- break;
- }
-
- case kDecoderAVT:
- case kDecoderCNG:
- {
- /* TODO(tlegrand): remove scaling once ACM has full 48 kHz
- * support. */
- uint16_t sample_freq =
- WebRtcNetEQ_DbGetSampleRate(&MCU_inst->codec_DB_inst,
- rtpPayloadType);
- if (sample_freq == 48000) {
- MCU_inst->scalingFactor = kTSscalingTwoThirds;
- }
-
- /* For sample_freq <= 32 kHz, do not change the timestamp scaling
- * settings. */
- break;
- }
- default:
- {
- /* do not use timestamp scaling */
- MCU_inst->scalingFactor = kTSnoScaling;
- break;
- }
- }
- return 0;
-}
-
-uint32_t WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
- uint32_t externalTS)
-{
- int32_t timestampDiff;
- uint32_t internalTS;
-
- /* difference between this and last incoming timestamp */
- timestampDiff = externalTS - MCU_inst->externalTS;
-
- switch (MCU_inst->scalingFactor)
- {
- case kTSscalingTwo:
- {
- /* multiply with 2 */
- timestampDiff = WEBRTC_SPL_LSHIFT_W32(timestampDiff, 1);
- break;
- }
- case kTSscalingTwoThirds:
- {
- /* multiply with 2/3 */
- timestampDiff = WEBRTC_SPL_LSHIFT_W32(timestampDiff, 1);
- timestampDiff = WebRtcSpl_DivW32W16(timestampDiff, 3);
- break;
- }
- case kTSscalingFourThirds:
- {
- /* multiply with 4/3 */
- timestampDiff = WEBRTC_SPL_LSHIFT_W32(timestampDiff, 2);
- timestampDiff = WebRtcSpl_DivW32W16(timestampDiff, 3);
- break;
- }
- default:
- {
- /* no scaling */
- }
- }
-
- /* add the scaled difference to last scaled timestamp and save ... */
- internalTS = MCU_inst->internalTS + timestampDiff;
-
- return internalTS;
-}
-
-uint32_t WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
- uint32_t internalTS)
-{
- int32_t timestampDiff;
- uint32_t externalTS;
-
- /* difference between this and last incoming timestamp */
- timestampDiff = (int32_t) internalTS - MCU_inst->internalTS;
-
- switch (MCU_inst->scalingFactor)
- {
- case kTSscalingTwo:
- {
- /* divide by 2 */
- timestampDiff = WEBRTC_SPL_RSHIFT_W32(timestampDiff, 1);
- break;
- }
- case kTSscalingTwoThirds:
- {
- /* multiply with 3/2 */
- timestampDiff = WEBRTC_SPL_MUL_32_16(timestampDiff, 3);
- timestampDiff = WEBRTC_SPL_RSHIFT_W32(timestampDiff, 1);
- break;
- }
- case kTSscalingFourThirds:
- {
- /* multiply with 3/4 */
- timestampDiff = WEBRTC_SPL_MUL_32_16(timestampDiff, 3);
- timestampDiff = WEBRTC_SPL_RSHIFT_W32(timestampDiff, 2);
- break;
- }
- default:
- {
- /* no scaling */
- }
- }
-
- /* add the scaled difference to last scaled timestamp and save ... */
- externalTS = MCU_inst->externalTS + timestampDiff;
-
- return externalTS;
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/recout.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/recout.c
deleted file mode 100644
index 8f62007310c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/recout.c
+++ /dev/null
@@ -1,1502 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of RecOut function, which is the main function for the audio output
- * process. This function must be called (through the NetEQ API) once every 10 ms.
- */
-
-#include "dsp.h"
-
-#include <assert.h>
-#include <string.h> /* to define NULL */
-
-#include "signal_processing_library.h"
-
-#include "dsp_helpfunctions.h"
-#include "neteq_error_codes.h"
-#include "neteq_defines.h"
-#include "mcu_dsp_common.h"
-
-/* Audio types */
-#define TYPE_SPEECH 1
-#define TYPE_CNG 2
-
-#ifdef NETEQ_DELAY_LOGGING
-#include "delay_logging.h"
-#include <stdio.h>
-#pragma message("*******************************************************************")
-#pragma message("You have specified to use NETEQ_DELAY_LOGGING in the NetEQ library.")
-#pragma message("Make sure that your test application supports this.")
-#pragma message("*******************************************************************")
-#endif
-
-/* Scratch usage:
-
- Type Name size startpos endpos
- int16_t pw16_NetEqAlgorithm_buffer 1080*fs/8000 0 1080*fs/8000-1
- struct dspInfo 6 1080*fs/8000 1085*fs/8000
-
- func WebRtcNetEQ_Normal 40+495*fs/8000 0 39+495*fs/8000
- func WebRtcNetEQ_Merge 40+496*fs/8000 0 39+496*fs/8000
- func WebRtcNetEQ_Expand 40+370*fs/8000 126*fs/800 39+496*fs/8000
- func WebRtcNetEQ_Accelerate 210 240*fs/8000 209+240*fs/8000
- func WebRtcNetEQ_BGNUpdate 69 480*fs/8000 68+480*fs/8000
-
- Total: 1086*fs/8000
- */
-
-#define SCRATCH_ALGORITHM_BUFFER 0
-#define SCRATCH_NETEQ_NORMAL 0
-#define SCRATCH_NETEQ_MERGE 0
-
-#if (defined(NETEQ_48KHZ_WIDEBAND))
-#define SCRATCH_DSP_INFO 6480
-#define SCRATCH_NETEQ_ACCELERATE 1440
-#define SCRATCH_NETEQ_BGN_UPDATE 2880
-#define SCRATCH_NETEQ_EXPAND 756
-#elif (defined(NETEQ_32KHZ_WIDEBAND))
-#define SCRATCH_DSP_INFO 4320
-#define SCRATCH_NETEQ_ACCELERATE 960
-#define SCRATCH_NETEQ_BGN_UPDATE 1920
-#define SCRATCH_NETEQ_EXPAND 504
-#elif (defined(NETEQ_WIDEBAND))
-#define SCRATCH_DSP_INFO 2160
-#define SCRATCH_NETEQ_ACCELERATE 480
-#define SCRATCH_NETEQ_BGN_UPDATE 960
-#define SCRATCH_NETEQ_EXPAND 252
-#else /* NB */
-#define SCRATCH_DSP_INFO 1080
-#define SCRATCH_NETEQ_ACCELERATE 240
-#define SCRATCH_NETEQ_BGN_UPDATE 480
-#define SCRATCH_NETEQ_EXPAND 126
-#endif
-
-#if (defined(NETEQ_48KHZ_WIDEBAND))
-#define SIZE_SCRATCH_BUFFER 6516
-#elif (defined(NETEQ_32KHZ_WIDEBAND))
-#define SIZE_SCRATCH_BUFFER 4344
-#elif (defined(NETEQ_WIDEBAND))
-#define SIZE_SCRATCH_BUFFER 2172
-#else /* NB */
-#define SIZE_SCRATCH_BUFFER 1086
-#endif
-
-#ifdef NETEQ_DELAY_LOGGING
-extern FILE *delay_fid2; /* file pointer to delay log file */
-extern uint32_t tot_received_packets;
-#endif
-
-
-int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, int16_t *pw16_outData,
- int16_t *pw16_len, int16_t BGNonly,
- int av_sync)
-{
-
- int16_t blockLen, payloadLen, len = 0, pos;
- int16_t w16_tmp1, w16_tmp2, w16_tmp3, DataEnough;
- int16_t *blockPtr;
- int16_t MD = 0;
-
- int16_t speechType = TYPE_SPEECH;
- uint16_t instr;
- uint16_t uw16_tmp;
-#ifdef SCRATCH
- char pw8_ScratchBuffer[((SIZE_SCRATCH_BUFFER + 1) * 2)];
- int16_t *pw16_scratchPtr = (int16_t*) pw8_ScratchBuffer;
- /* pad with 240*fs_mult to match the overflow guard below */
- int16_t pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE+240*6];
- int16_t *pw16_NetEqAlgorithm_buffer = pw16_scratchPtr
- + SCRATCH_ALGORITHM_BUFFER;
- DSP2MCU_info_t *dspInfo = (DSP2MCU_info_t*) (pw16_scratchPtr + SCRATCH_DSP_INFO);
-#else
- /* pad with 240*fs_mult to match the overflow guard below */
- int16_t pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE+240*6];
- int16_t pw16_NetEqAlgorithm_buffer[NETEQ_MAX_OUTPUT_SIZE+240*6];
- DSP2MCU_info_t dspInfoStruct;
- DSP2MCU_info_t *dspInfo = &dspInfoStruct;
-#endif
- int16_t fs_mult;
- int borrowedSamples;
- int oldBorrowedSamples;
- int return_value = 0;
- int16_t lastModeBGNonly = (inst->w16_mode & MODE_BGN_ONLY) != 0; /* check BGN flag */
- void *mainInstBackup = inst->main_inst;
-
-#ifdef NETEQ_DELAY_LOGGING
- int temp_var;
-#endif
- int16_t dtmfValue = -1;
- int16_t dtmfVolume = -1;
- int playDtmf = 0;
-#ifdef NETEQ_ATEVENT_DECODE
- int dtmfSwitch = 0;
-#endif
-#ifdef NETEQ_STEREO
- MasterSlaveInfo *msInfo = inst->msInfo;
-#endif
- int16_t *sharedMem = pw16_NetEqAlgorithm_buffer; /* Reuse memory SHARED_MEM_SIZE size */
- inst->pw16_readAddress = sharedMem;
- inst->pw16_writeAddress = sharedMem;
-
- /* Get information about if there is one descriptor left */
- if (inst->codec_ptr_inst.funcGetMDinfo != NULL)
- {
- MD = inst->codec_ptr_inst.funcGetMDinfo(inst->codec_ptr_inst.codec_state);
- if (MD > 0)
- MD = 1;
- else
- MD = 0;
- }
-
-#ifdef NETEQ_STEREO
- if ((msInfo->msMode == NETEQ_SLAVE) && (inst->codec_ptr_inst.funcDecode != NULL))
- {
- /*
- * Valid function pointers indicate that we have decoded something,
- * and that the timestamp information is correct.
- */
-
- /* Get the information from master to correct synchronization */
- uint32_t currentMasterTimestamp;
- uint32_t currentSlaveTimestamp;
-
- currentMasterTimestamp = msInfo->endTimestamp - msInfo->samplesLeftWithOverlap;
- currentSlaveTimestamp = inst->endTimestamp - (inst->endPosition - inst->curPosition);
-
- /* Partition the uint32_t space in three: [0 0.25) [0.25 0.75] (0.75 1]
- * We consider a wrap to have occurred if the timestamps are in
- * different edge partitions.
- */
- if (currentSlaveTimestamp < 0x40000000 &&
- currentMasterTimestamp > 0xc0000000) {
- // Slave has wrapped.
- currentSlaveTimestamp += (0xffffffff - currentMasterTimestamp) + 1;
- currentMasterTimestamp = 0;
- } else if (currentMasterTimestamp < 0x40000000 &&
- currentSlaveTimestamp > 0xc0000000) {
- // Master has wrapped.
- currentMasterTimestamp += (0xffffffff - currentSlaveTimestamp) + 1;
- currentSlaveTimestamp = 0;
- }
-
- if (currentSlaveTimestamp < currentMasterTimestamp)
- {
- /* brute-force discard a number of samples to catch up */
- inst->curPosition += currentMasterTimestamp - currentSlaveTimestamp;
-
- }
- else if (currentSlaveTimestamp > currentMasterTimestamp)
- {
- /* back off current position to slow down */
- inst->curPosition -= currentSlaveTimestamp - currentMasterTimestamp;
- }
-
- /* make sure we have at least "overlap" samples left */
- inst->curPosition = WEBRTC_SPL_MIN(inst->curPosition,
- inst->endPosition - inst->ExpandInst.w16_overlap);
-
- /* make sure we do not end up outside the speech history */
- inst->curPosition = WEBRTC_SPL_MAX(inst->curPosition, 0);
- }
-#endif
-
- /* Write status data to shared memory */
- dspInfo->playedOutTS = inst->endTimestamp;
- dspInfo->samplesLeft = inst->endPosition - inst->curPosition
- - inst->ExpandInst.w16_overlap;
- dspInfo->MD = MD;
- dspInfo->lastMode = inst->w16_mode;
- dspInfo->frameLen = inst->w16_frameLen;
-
- /* Force update of codec if codec function is NULL */
- if (inst->codec_ptr_inst.funcDecode == NULL)
- {
- dspInfo->lastMode |= MODE_AWAITING_CODEC_PTR;
- }
-
-#ifdef NETEQ_STEREO
- if (msInfo->msMode == NETEQ_SLAVE && (msInfo->extraInfo == DTMF_OVERDUB
- || msInfo->extraInfo == DTMF_ONLY))
- {
- /* Signal that the master instance generated DTMF tones */
- dspInfo->lastMode |= MODE_MASTER_DTMF_SIGNAL;
- }
-
- if (msInfo->msMode != NETEQ_MONO)
- {
- /* We are using stereo mode; signal this to MCU side */
- dspInfo->lastMode |= MODE_USING_STEREO;
- }
-#endif
-
- WEBRTC_SPL_MEMCPY_W8(inst->pw16_writeAddress,dspInfo,sizeof(DSP2MCU_info_t));
-
- /* Signal MCU with "interrupt" call to main inst*/
-#ifdef NETEQ_STEREO
- assert(msInfo != NULL);
- if (msInfo->msMode == NETEQ_MASTER)
- {
- /* clear info to slave */
- WebRtcSpl_MemSetW16((int16_t *) msInfo, 0,
- sizeof(MasterSlaveInfo) / sizeof(int16_t));
- /* re-set mode */
- msInfo->msMode = NETEQ_MASTER;
-
- /* Store some information to slave */
- msInfo->endTimestamp = inst->endTimestamp;
- msInfo->samplesLeftWithOverlap = inst->endPosition - inst->curPosition;
- }
-#endif
-
- /*
- * This call will trigger the MCU side to make a decision based on buffer contents and
- * decision history. Instructions, encoded data and function pointers will be written
- * to the shared memory.
- */
- return_value = WebRtcNetEQ_DSP2MCUinterrupt((MainInst_t *) inst->main_inst, sharedMem);
-
- /* Read MCU data and instructions */
- instr = (uint16_t) (inst->pw16_readAddress[0] & 0xf000);
-
-#ifdef NETEQ_STEREO
- if (msInfo->msMode == NETEQ_MASTER)
- {
- msInfo->instruction = instr;
- }
- else if (msInfo->msMode == NETEQ_SLAVE)
- {
- /* Nothing to do */
- }
-#endif
-
- /* check for error returned from MCU side, if so, return error */
- if (return_value < 0)
- {
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return return_value;
- }
-
- blockPtr = &((inst->pw16_readAddress)[3]);
-
- /* Check for DTMF payload flag */
- if ((inst->pw16_readAddress[0] & DSP_DTMF_PAYLOAD) != 0)
- {
- playDtmf = 1;
- dtmfValue = blockPtr[1];
- dtmfVolume = blockPtr[2];
- blockPtr += 3;
-
-#ifdef NETEQ_STEREO
- if (msInfo->msMode == NETEQ_MASTER)
- {
- /* signal to slave that master is using DTMF */
- msInfo->extraInfo = DTMF_OVERDUB;
- }
-#endif
- }
-
- blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of int16_t */
- payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
- blockPtr++;
-
- /* Do we have to change our decoder? */
- if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_NEW_CODEC)
- {
- WEBRTC_SPL_MEMCPY_W16(&inst->codec_ptr_inst,blockPtr,(payloadLen+1)>>1);
- if (inst->codec_ptr_inst.codec_fs != 0)
- {
- return_value = WebRtcNetEQ_DSPInit(inst, inst->codec_ptr_inst.codec_fs);
- if (return_value != 0)
- { /* error returned */
- instr = DSP_INSTR_FADE_TO_BGN; /* emergency instruction */
- }
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS;
- if ((fwrite(&temp_var, sizeof(int),
- 1, delay_fid2) != 1) ||
- (fwrite(&inst->fs, sizeof(uint16_t),
- 1, delay_fid2) != 1)) {
- return -1;
- }
-#endif
- }
-
- /* Copy it again since the init destroys this part */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->codec_ptr_inst,blockPtr,(payloadLen+1)>>1);
- inst->endTimestamp = inst->codec_ptr_inst.timeStamp;
- inst->videoSyncTimestamp = inst->codec_ptr_inst.timeStamp;
- blockPtr += blockLen;
- blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1;
- payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
- blockPtr++;
- if (inst->codec_ptr_inst.funcDecodeInit != NULL)
- {
- inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
- }
-
-#ifdef NETEQ_CNG_CODEC
-
- /* Also update the CNG state as this might be uninitialized */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->CNG_Codec_inst,blockPtr,(payloadLen+1)>>1);
- blockPtr += blockLen;
- blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1;
- payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
- blockPtr++;
- if (inst->CNG_Codec_inst != NULL)
- {
- WebRtcCng_InitDec(inst->CNG_Codec_inst);
- }
-#endif
- }
- else if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_RESET)
- {
- /* Reset the current codec (but not DSP struct) */
- if (inst->codec_ptr_inst.funcDecodeInit != NULL)
- {
- inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
- }
-
-#ifdef NETEQ_CNG_CODEC
- /* And reset CNG */
- if (inst->CNG_Codec_inst != NULL)
- {
- WebRtcCng_InitDec(inst->CNG_Codec_inst);
- }
-#endif /*NETEQ_CNG_CODEC*/
- }
-
- fs_mult = WebRtcNetEQ_CalcFsMult(inst->fs);
-
- /* Add late packet? */
- if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_ADD_LATE_PKT)
- {
- if (inst->codec_ptr_inst.funcAddLatePkt != NULL)
- {
- /* Only do this if the codec has support for Add Late Pkt */
- inst->codec_ptr_inst.funcAddLatePkt(inst->codec_ptr_inst.codec_state, blockPtr,
- payloadLen);
- }
- blockPtr += blockLen;
- blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of Word16 */
- payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
- blockPtr++;
- }
-
- /* Do we have to decode data? */
- if ((instr == DSP_INSTR_NORMAL) || (instr == DSP_INSTR_ACCELERATE) || (instr
- == DSP_INSTR_MERGE) || (instr == DSP_INSTR_PREEMPTIVE_EXPAND))
- {
- /* Do we need to update codec-internal PLC state? */
- if ((instr == DSP_INSTR_MERGE) && (inst->codec_ptr_inst.funcDecodePLC != NULL))
- {
- len = 0;
- len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
- &pw16_decoded_buffer[len], 1);
- }
- len = 0;
-
- /* Do decoding */
- while ((blockLen > 0) && (len < (240 * fs_mult))) /* Guard somewhat against overflow */
- {
- if (inst->codec_ptr_inst.funcDecode != NULL)
- {
- int16_t dec_Len;
- if (!BGNonly)
- {
- /* Check if this is a sync payload. */
- if (av_sync && WebRtcNetEQ_IsSyncPayload(blockPtr,
- payloadLen)) {
- /* Zero-stuffing with same size as the last frame. */
- dec_Len = inst->w16_frameLen;
- memset(&pw16_decoded_buffer[len], 0, dec_Len *
- sizeof(pw16_decoded_buffer[len]));
- } else {
- /* Do decoding as normal
- *
- * blockPtr is pointing to payload, at this point,
- * the most significant bit of *(blockPtr - 1) is a flag if
- * set to 1 indicates that the following payload is the
- * redundant payload.
- */
- if (((*(blockPtr - 1) & DSP_CODEC_RED_FLAG) != 0)
- && (inst->codec_ptr_inst.funcDecodeRCU != NULL))
- {
- dec_Len = inst->codec_ptr_inst.funcDecodeRCU(
- inst->codec_ptr_inst.codec_state, blockPtr,
- payloadLen, &pw16_decoded_buffer[len], &speechType);
- }
- else
- {
- /* Regular decoding. */
- dec_Len = inst->codec_ptr_inst.funcDecode(
- inst->codec_ptr_inst.codec_state, blockPtr,
- payloadLen, &pw16_decoded_buffer[len], &speechType);
- }
- }
- }
- else
- {
- /*
- * Background noise mode: don't decode, just produce the same length BGN.
- * Don't call Expand for BGN here, since Expand uses the memory where the
- * bitstreams are stored (sharemem).
- */
- dec_Len = inst->w16_frameLen;
- }
-
- if (dec_Len > 0)
- {
- len += dec_Len;
- /* Update frameLen */
- inst->w16_frameLen = dec_Len;
- }
- else if (dec_Len < 0)
- {
- /* Error */
- len = -1;
- break;
- }
- /*
- * Sanity check (although we might still write outside memory when this
- * happens...)
- */
- if (len > NETEQ_MAX_FRAME_SIZE)
- {
- WebRtcSpl_MemSetW16(pw16_outData, 0, inst->timestampsPerCall);
- *pw16_len = inst->timestampsPerCall;
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return RECOUT_ERROR_DECODED_TOO_MUCH;
- }
-
- /* Verify that instance was not corrupted by decoder */
- if (mainInstBackup != inst->main_inst)
- {
- /* Instance is corrupt */
- return CORRUPT_INSTANCE;
- }
-
- }
- blockPtr += blockLen;
- blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of Word16 */
- payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
- blockPtr++;
- }
-
- if (len < 0)
- {
- len = 0;
- inst->endTimestamp += inst->w16_frameLen; /* advance one frame */
- if (inst->codec_ptr_inst.funcGetErrorCode != NULL)
- {
- return_value = -inst->codec_ptr_inst.funcGetErrorCode(
- inst->codec_ptr_inst.codec_state);
- }
- else
- {
- return_value = RECOUT_ERROR_DECODING;
- }
- instr = DSP_INSTR_FADE_TO_BGN;
- }
- if (speechType != TYPE_CNG)
- {
- /*
- * Don't increment timestamp if codec returned CNG speech type
- * since in this case, the MCU side will increment the CNGplayedTS counter.
- */
- inst->endTimestamp += len;
- }
- }
- else if (instr == DSP_INSTR_NORMAL_ONE_DESC)
- {
- if (inst->codec_ptr_inst.funcDecode != NULL)
- {
- len = inst->codec_ptr_inst.funcDecode(inst->codec_ptr_inst.codec_state, NULL, 0,
- pw16_decoded_buffer, &speechType);
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- if (fwrite(&inst->endTimestamp, sizeof(uint32_t),
- 1, delay_fid2) != 1) {
- return -1;
- }
- if (fwrite(&dspInfo->samplesLeft, sizeof(uint16_t),
- 1, delay_fid2) != 1) {
- return -1;
- }
- tot_received_packets++;
-#endif
- }
- if (speechType != TYPE_CNG)
- {
- /*
- * Don't increment timestamp if codec returned CNG speech type
- * since in this case, the MCU side will increment the CNGplayedTS counter.
- */
- inst->endTimestamp += len;
- }
-
- /* Verify that instance was not corrupted by decoder */
- if (mainInstBackup != inst->main_inst)
- {
- /* Instance is corrupt */
- return CORRUPT_INSTANCE;
- }
-
- if (len <= 0)
- {
- len = 0;
- if (inst->codec_ptr_inst.funcGetErrorCode != NULL)
- {
- return_value = -inst->codec_ptr_inst.funcGetErrorCode(
- inst->codec_ptr_inst.codec_state);
- }
- else
- {
- return_value = RECOUT_ERROR_DECODING;
- }
- if ((inst->codec_ptr_inst.funcDecodeInit != NULL)
- && (inst->codec_ptr_inst.codec_state != NULL))
- {
- /* Reinitialize codec state as something is obviously wrong */
- inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
- }
- inst->endTimestamp += inst->w16_frameLen; /* advance one frame */
- instr = DSP_INSTR_FADE_TO_BGN;
- }
- }
-
- if (len == 0 && lastModeBGNonly) /* no new data */
- {
- BGNonly = 1; /* force BGN this time too */
- }
-
-#ifdef NETEQ_VAD
- if ((speechType == TYPE_CNG) /* decoder responded with codec-internal CNG */
- || ((instr == DSP_INSTR_DO_RFC3389CNG) && (blockLen > 0)) /* ... or, SID frame */
- || (inst->fs > 16000)) /* ... or, if not NB or WB */
- {
- /* disable post-decode VAD upon first sign of send-side DTX/VAD active, or if SWB */
- inst->VADInst.VADEnabled = 0;
- inst->VADInst.VADDecision = 1; /* set to always active, just to be on the safe side */
- inst->VADInst.SIDintervalCounter = 0; /* reset SID interval counter */
- }
- else if (!inst->VADInst.VADEnabled) /* VAD disabled and no SID/CNG data observed this time */
- {
- inst->VADInst.SIDintervalCounter++; /* increase counter */
- }
-
- /* check for re-enabling the VAD */
- if (inst->VADInst.SIDintervalCounter >= POST_DECODE_VAD_AUTO_ENABLE)
- {
- /*
- * It's been a while since the last CNG/SID frame was observed => re-enable VAD.
- * (Do not care to look for a VAD instance, since this is done inside the init
- * function)
- */
- WebRtcNetEQ_InitVAD(&inst->VADInst, inst->fs);
- }
-
- if (len > 0 /* if we decoded any data */
- && inst->VADInst.VADEnabled /* and VAD enabled */
- && inst->fs <= 16000) /* can only do VAD for NB and WB */
- {
- int VADframeSize; /* VAD frame size in ms */
- int VADSamplePtr = 0;
-
- inst->VADInst.VADDecision = 0;
-
- if (inst->VADInst.VADFunction != NULL) /* make sure that VAD function is provided */
- {
- /* divide the data into groups, as large as possible */
- for (VADframeSize = 30; VADframeSize >= 10; VADframeSize -= 10)
- {
- /* loop through 30, 20, 10 */
-
- while (inst->VADInst.VADDecision == 0
- && len - VADSamplePtr >= VADframeSize * fs_mult * 8)
- {
- /*
- * Only continue until first active speech found, and as long as there is
- * one VADframeSize left.
- */
-
- /* call VAD with new decoded data */
- inst->VADInst.VADDecision |= inst->VADInst.VADFunction(
- inst->VADInst.VADState, (int) inst->fs,
- (int16_t *) &pw16_decoded_buffer[VADSamplePtr],
- (VADframeSize * fs_mult * 8));
-
- VADSamplePtr += VADframeSize * fs_mult * 8; /* increment sample counter */
- }
- }
- }
- else
- { /* VAD function is NULL */
- inst->VADInst.VADDecision = 1; /* set decision to active */
- inst->VADInst.VADEnabled = 0; /* disable VAD since we have no VAD function */
- }
-
- }
-#endif /* NETEQ_VAD */
-
- /* Adjust timestamp if needed */
- uw16_tmp = (uint16_t) inst->pw16_readAddress[1];
- inst->endTimestamp += (((uint32_t) uw16_tmp) << 16);
- uw16_tmp = (uint16_t) inst->pw16_readAddress[2];
- inst->endTimestamp += uw16_tmp;
-
- if (BGNonly && len > 0)
- {
- /*
- * If BGN mode, we did not produce any data at decoding.
- * Do it now instead.
- */
-
- WebRtcNetEQ_GenerateBGN(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_decoded_buffer, len);
- }
-
- /* Switch on the instruction received from the MCU side. */
- switch (instr)
- {
- case DSP_INSTR_NORMAL:
-
- /* Allow for signal processing to apply gain-back etc */
- WebRtcNetEQ_Normal(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
-#endif
- pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
-
- /* If last packet was decoded as a inband CNG set mode to CNG instead */
- if ((speechType == TYPE_CNG) || ((inst->w16_mode == MODE_CODEC_INTERNAL_CNG)
- && (len == 0)))
- {
- inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
- }
-
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- break;
- case DSP_INSTR_NORMAL_ONE_DESC:
-
- /* Allow for signal processing to apply gain-back etc */
- WebRtcNetEQ_Normal(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
-#endif
- pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- inst->w16_mode = MODE_ONE_DESCRIPTOR;
- break;
- case DSP_INSTR_MERGE:
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = -len;
-#endif
- /* Call Merge with history*/
- return_value = WebRtcNetEQ_Merge(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_MERGE,
-#endif
- pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
-
- if (return_value < 0)
- {
- /* error */
- return return_value;
- }
-
-#ifdef NETEQ_DELAY_LOGGING
- temp_var += len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
- /* If last packet was decoded as a inband CNG set mode to CNG instead */
- if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- break;
-
- case DSP_INSTR_EXPAND:
- len = 0;
- pos = 0;
- while ((inst->endPosition - inst->curPosition - inst->ExpandInst.w16_overlap + pos)
- < (inst->timestampsPerCall))
- {
- return_value = WebRtcNetEQ_Expand(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_NetEqAlgorithm_buffer, &len, BGNonly);
- if (return_value < 0)
- {
- /* error */
- return return_value;
- }
-
- /*
- * Update buffer, but only end part (otherwise expand state is destroyed
- * since it reuses speechBuffer[] memory
- */
-
- WEBRTC_SPL_MEMMOVE_W16(inst->pw16_speechHistory,
- inst->pw16_speechHistory + len,
- (inst->w16_speechHistoryLen-len));
- WEBRTC_SPL_MEMCPY_W16(&inst->pw16_speechHistory[inst->w16_speechHistoryLen-len],
- pw16_NetEqAlgorithm_buffer, len);
-
- inst->curPosition -= len;
-
- /* Update variables for VQmon */
- inst->w16_concealedTS += len;
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
- len = 0; /* already written the data, so do not write it again further down. */
- }
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- break;
-
- case DSP_INSTR_ACCELERATE:
- if (len < 3 * 80 * fs_mult)
- {
- /* We need to move data from the speechBuffer[] in order to get 30 ms */
- borrowedSamples = 3 * 80 * fs_mult - len;
-
- WEBRTC_SPL_MEMMOVE_W16(&pw16_decoded_buffer[borrowedSamples],
- pw16_decoded_buffer, len);
- WEBRTC_SPL_MEMCPY_W16(pw16_decoded_buffer,
- &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
- borrowedSamples);
-
- return_value = WebRtcNetEQ_Accelerate(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
-#endif
- pw16_decoded_buffer, 3 * inst->timestampsPerCall,
- pw16_NetEqAlgorithm_buffer, &len, BGNonly);
-
- if (return_value < 0)
- {
- /* error */
- return return_value;
- }
-
- /* Copy back samples to the buffer */
- if (len < borrowedSamples)
- {
- /*
- * This destroys the beginning of the buffer, but will not cause any
- * problems
- */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-borrowedSamples],
- pw16_NetEqAlgorithm_buffer, len);
- WEBRTC_SPL_MEMMOVE_W16(&inst->speechBuffer[borrowedSamples-len],
- inst->speechBuffer,
- (inst->endPosition-(borrowedSamples-len)));
-
- inst->curPosition += (borrowedSamples - len);
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = 3 * inst->timestampsPerCall - len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
- len = 0;
- }
- else
- {
- WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-borrowedSamples],
- pw16_NetEqAlgorithm_buffer, borrowedSamples);
- WEBRTC_SPL_MEMMOVE_W16(pw16_NetEqAlgorithm_buffer,
- &pw16_NetEqAlgorithm_buffer[borrowedSamples],
- (len-borrowedSamples));
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = 3 * inst->timestampsPerCall - len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
- len = len - borrowedSamples;
- }
-
- }
- else
- {
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = len;
-#endif
- return_value = WebRtcNetEQ_Accelerate(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
-#endif
- pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len, BGNonly);
-
- if (return_value < 0)
- {
- /* error */
- return return_value;
- }
-
-#ifdef NETEQ_DELAY_LOGGING
- temp_var -= len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
- }
- /* If last packet was decoded as a inband CNG set mode to CNG instead */
- if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- break;
-
- case DSP_INSTR_DO_RFC3389CNG:
-#ifdef NETEQ_CNG_CODEC
- if (blockLen > 0)
- {
- if (WebRtcCng_UpdateSid(inst->CNG_Codec_inst, (uint8_t*) blockPtr,
- payloadLen) < 0)
- {
- /* error returned from CNG function */
- return_value = -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
- len = inst->timestampsPerCall;
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
- break;
- }
- }
-
- if (BGNonly)
- {
- /* Get data from BGN function instead of CNG */
- len = WebRtcNetEQ_GenerateBGN(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_NetEqAlgorithm_buffer, inst->timestampsPerCall);
- if (len != inst->timestampsPerCall)
- {
- /* this is not good, treat this as an error */
- return_value = -1;
- }
- }
- else
- {
- return_value = WebRtcNetEQ_Cng(inst, pw16_NetEqAlgorithm_buffer,
- inst->timestampsPerCall);
- }
- len = inst->timestampsPerCall;
- inst->ExpandInst.w16_consecExp = 0;
- inst->w16_mode = MODE_RFC3389CNG;
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
-
- if (return_value < 0)
- {
- /* error returned */
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
- }
-
- break;
-#else
- return FAULTY_INSTRUCTION;
-#endif
- case DSP_INSTR_DO_CODEC_INTERNAL_CNG:
- /*
- * This represents the case when there is no transmission and the decoder should
- * do internal CNG.
- */
- len = 0;
- if (inst->codec_ptr_inst.funcDecode != NULL && !BGNonly)
- {
- len = inst->codec_ptr_inst.funcDecode(inst->codec_ptr_inst.codec_state,
- blockPtr, 0, pw16_decoded_buffer, &speechType);
- }
- else
- {
- /* get BGN data */
- len = WebRtcNetEQ_GenerateBGN(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_decoded_buffer, inst->timestampsPerCall);
- }
- WebRtcNetEQ_Normal(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
-#endif
- pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
- inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
- inst->ExpandInst.w16_consecExp = 0;
- break;
-
- case DSP_INSTR_DTMF_GENERATE:
-#ifdef NETEQ_ATEVENT_DECODE
- dtmfSwitch = 0;
- if ((inst->w16_mode != MODE_DTMF) && (inst->DTMFInst.reinit == 0))
- {
- /* Special case; see below.
- * We must catch this before calling DTMFGenerate,
- * since reinit is set to 0 in that call.
- */
- dtmfSwitch = 1;
- }
-
- len = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
- pw16_NetEqAlgorithm_buffer, inst->fs, -1);
- if (len < 0)
- {
- /* error occurred */
- return_value = len;
- len = inst->timestampsPerCall;
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
- }
-
- if (dtmfSwitch == 1)
- {
- /*
- * This is the special case where the previous operation was DTMF overdub.
- * but the current instruction is "regular" DTMF. We must make sure that the
- * DTMF does not have any discontinuities. The first DTMF sample that we
- * generate now must be played out immediately, wherefore it must be copied to
- * the speech buffer.
- */
-
- /*
- * Generate extra DTMF data to fill the space between
- * curPosition and endPosition
- */
- int16_t tempLen;
-
- tempLen = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
- &pw16_NetEqAlgorithm_buffer[len], inst->fs,
- inst->endPosition - inst->curPosition);
- if (tempLen < 0)
- {
- /* error occurred */
- return_value = tempLen;
- len = inst->endPosition - inst->curPosition;
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0,
- inst->endPosition - inst->curPosition);
- }
-
- /* Add to total length */
- len += tempLen;
-
- /* Overwrite the "future" part of the speech buffer with the new DTMF data */
-
- WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->curPosition],
- pw16_NetEqAlgorithm_buffer,
- inst->endPosition - inst->curPosition);
-
- /* Shuffle the remaining data to the beginning of algorithm buffer */
- len -= (inst->endPosition - inst->curPosition);
- WEBRTC_SPL_MEMMOVE_W16(pw16_NetEqAlgorithm_buffer,
- &pw16_NetEqAlgorithm_buffer[inst->endPosition - inst->curPosition],
- len);
- }
-
- inst->endTimestamp += inst->timestampsPerCall;
- inst->DTMFInst.reinit = 0;
- inst->ExpandInst.w16_consecExp = 0;
- inst->w16_mode = MODE_DTMF;
- BGNonly = 0; /* override BGN only and let DTMF through */
-
- playDtmf = 0; /* set to zero because the DTMF is already in the Algorithm buffer */
- /*
- * If playDtmf is 1, an extra DTMF vector will be generated and overdubbed
- * on the output.
- */
-
-#ifdef NETEQ_STEREO
- if (msInfo->msMode == NETEQ_MASTER)
- {
- /* signal to slave that master is using DTMF only */
- msInfo->extraInfo = DTMF_ONLY;
- }
-#endif
-
- break;
-#else
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return FAULTY_INSTRUCTION;
-#endif
-
- case DSP_INSTR_DO_ALTERNATIVE_PLC:
- if (inst->codec_ptr_inst.funcDecodePLC != 0)
- {
- len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
- pw16_NetEqAlgorithm_buffer, 1);
- }
- else
- {
- len = inst->timestampsPerCall;
- /* ZeroStuffing... */
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
- /* By not advancing the timestamp, NetEq inserts samples. */
- inst->statInst.addedSamples += len;
- }
- inst->ExpandInst.w16_consecExp = 0;
- break;
- case DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS:
- if (inst->codec_ptr_inst.funcDecodePLC != 0)
- {
- len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
- pw16_NetEqAlgorithm_buffer, 1);
- }
- else
- {
- len = inst->timestampsPerCall;
- /* ZeroStuffing... */
- WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
- }
- inst->ExpandInst.w16_consecExp = 0;
- inst->endTimestamp += len;
- break;
- case DSP_INSTR_DO_AUDIO_REPETITION:
- len = inst->timestampsPerCall;
- /* copy->paste... */
- WEBRTC_SPL_MEMCPY_W16(pw16_NetEqAlgorithm_buffer,
- &inst->speechBuffer[inst->endPosition-len], len);
- inst->ExpandInst.w16_consecExp = 0;
- break;
- case DSP_INSTR_DO_AUDIO_REPETITION_INC_TS:
- len = inst->timestampsPerCall;
- /* copy->paste... */
- WEBRTC_SPL_MEMCPY_W16(pw16_NetEqAlgorithm_buffer,
- &inst->speechBuffer[inst->endPosition-len], len);
- inst->ExpandInst.w16_consecExp = 0;
- inst->endTimestamp += len;
- break;
-
- case DSP_INSTR_PREEMPTIVE_EXPAND:
- if (len < 3 * inst->timestampsPerCall)
- {
- /* borrow samples from sync buffer if necessary */
- borrowedSamples = 3 * inst->timestampsPerCall - len; /* borrow this many samples */
- /* calculate how many of these are already played out */
- oldBorrowedSamples = WEBRTC_SPL_MAX(0,
- borrowedSamples - (inst->endPosition - inst->curPosition));
- WEBRTC_SPL_MEMMOVE_W16(&pw16_decoded_buffer[borrowedSamples],
- pw16_decoded_buffer, len);
- WEBRTC_SPL_MEMCPY_W16(pw16_decoded_buffer,
- &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
- borrowedSamples);
- }
- else
- {
- borrowedSamples = 0;
- oldBorrowedSamples = 0;
- }
-
-#ifdef NETEQ_DELAY_LOGGING
- w16_tmp1 = len;
-#endif
- /* do the expand */
- return_value = WebRtcNetEQ_PreEmptiveExpand(inst,
-#ifdef SCRATCH
- /* use same scratch memory as Accelerate */
- pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
-#endif
- pw16_decoded_buffer, len + borrowedSamples, oldBorrowedSamples,
- pw16_NetEqAlgorithm_buffer, &len, BGNonly);
-
- if (return_value < 0)
- {
- /* error */
- return return_value;
- }
-
- if (borrowedSamples > 0)
- {
- /* return borrowed samples */
-
- /* Copy back to last part of speechBuffer from beginning of output buffer */
- WEBRTC_SPL_MEMCPY_W16( &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
- pw16_NetEqAlgorithm_buffer,
- borrowedSamples);
-
- len -= borrowedSamples; /* remove the borrowed samples from new total length */
-
- /* Move to beginning of output buffer from end of output buffer */
- WEBRTC_SPL_MEMMOVE_W16( pw16_NetEqAlgorithm_buffer,
- &pw16_NetEqAlgorithm_buffer[borrowedSamples],
- len);
- }
-
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = len - w16_tmp1; /* number of samples added */
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
- /* If last packet was decoded as inband CNG, set mode to CNG instead */
- if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
-#ifdef NETEQ_ATEVENT_DECODE
- if (playDtmf == 0)
- {
- inst->DTMFInst.reinit = 1;
- }
-#endif
- break;
-
- case DSP_INSTR_FADE_TO_BGN:
- {
- int tempReturnValue;
- /* do not overwrite return_value, since it likely contains an error code */
-
- /* calculate interpolation length */
- w16_tmp3 = WEBRTC_SPL_MIN(inst->endPosition - inst->curPosition,
- inst->timestampsPerCall);
- /* check that it will fit in pw16_NetEqAlgorithm_buffer */
- if (w16_tmp3 + inst->w16_frameLen > NETEQ_MAX_OUTPUT_SIZE)
- {
- w16_tmp3 = NETEQ_MAX_OUTPUT_SIZE - inst->w16_frameLen;
- }
-
- /* call Expand */
- len = inst->timestampsPerCall + inst->ExpandInst.w16_overlap;
- pos = 0;
-
- tempReturnValue = WebRtcNetEQ_Expand(inst,
-#ifdef SCRATCH
- pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
-#endif
- pw16_NetEqAlgorithm_buffer, &len, 1);
-
- if (tempReturnValue < 0)
- {
- /* error */
- /* this error value will override return_value */
- return tempReturnValue;
- }
-
- pos += len; /* got len samples from expand */
-
- /* copy to fill the demand */
- while (pos + len <= inst->w16_frameLen + w16_tmp3)
- {
- WEBRTC_SPL_MEMCPY_W16(&pw16_NetEqAlgorithm_buffer[pos],
- pw16_NetEqAlgorithm_buffer, len);
- pos += len;
- }
-
- /* fill with fraction of the expand vector if needed */
- if (pos < inst->w16_frameLen + w16_tmp3)
- {
- WEBRTC_SPL_MEMCPY_W16(&pw16_NetEqAlgorithm_buffer[pos], pw16_NetEqAlgorithm_buffer,
- inst->w16_frameLen + w16_tmp3 - pos);
- }
-
- len = inst->w16_frameLen + w16_tmp3; /* truncate any surplus samples since we don't want these */
-
- /*
- * Mix with contents in sync buffer. Find largest power of two that is less than
- * interpolate length divide 16384 with this number; result is in w16_tmp2.
- */
- w16_tmp1 = 2;
- w16_tmp2 = 16384;
- while (w16_tmp1 <= w16_tmp3)
- {
- w16_tmp2 >>= 1; /* divide with 2 */
- w16_tmp1 <<= 1; /* increase with a factor of 2 */
- }
-
- w16_tmp1 = 0;
- pos = 0;
- while (w16_tmp1 < 16384)
- {
- inst->speechBuffer[inst->curPosition + pos]
- =
- (int16_t) WEBRTC_SPL_RSHIFT_W32(
- WEBRTC_SPL_MUL_16_16( inst->speechBuffer[inst->endPosition - w16_tmp3 + pos],
- 16384-w16_tmp1 ) +
- WEBRTC_SPL_MUL_16_16( pw16_NetEqAlgorithm_buffer[pos], w16_tmp1 ),
- 14 );
- w16_tmp1 += w16_tmp2;
- pos++;
- }
-
- /* overwrite remainder of speech buffer */
-
- WEBRTC_SPL_MEMCPY_W16( &inst->speechBuffer[inst->endPosition - w16_tmp3 + pos],
- &pw16_NetEqAlgorithm_buffer[pos], w16_tmp3 - pos);
-
- len -= w16_tmp3;
- /* shift algorithm buffer */
-
- WEBRTC_SPL_MEMMOVE_W16( pw16_NetEqAlgorithm_buffer,
- &pw16_NetEqAlgorithm_buffer[w16_tmp3],
- len );
-
- /* Update variables for VQmon */
- inst->w16_concealedTS += len;
-
- inst->w16_mode = MODE_FADE_TO_BGN;
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
- temp_var = len;
- if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
- return -1;
- }
-#endif
-
- break;
- }
-
- default:
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return FAULTY_INSTRUCTION;
- } /* end of grand switch */
-
- /* Copy data directly to output buffer */
-
- w16_tmp2 = 0;
- if ((inst->endPosition + len - inst->curPosition - inst->ExpandInst.w16_overlap)
- >= inst->timestampsPerCall)
- {
- w16_tmp2 = inst->endPosition - inst->curPosition;
- w16_tmp2 = WEBRTC_SPL_MAX(w16_tmp2, 0); /* Additional error protection, just in case */
- w16_tmp1 = WEBRTC_SPL_MIN(w16_tmp2, inst->timestampsPerCall);
- w16_tmp2 = inst->timestampsPerCall - w16_tmp1;
- WEBRTC_SPL_MEMCPY_W16(pw16_outData, &inst->speechBuffer[inst->curPosition], w16_tmp1);
- WEBRTC_SPL_MEMCPY_W16(&pw16_outData[w16_tmp1], pw16_NetEqAlgorithm_buffer, w16_tmp2);
- DataEnough = 1;
- }
- else
- {
- DataEnough = 0;
- }
-
- if (playDtmf != 0)
- {
-#ifdef NETEQ_ATEVENT_DECODE
- int16_t outDataIndex = 0;
- int16_t overdubLen = -1; /* default len */
- int16_t dtmfLen;
-
- /*
- * Overdub the output with DTMF. Note that this is not executed if the
- * DSP_INSTR_DTMF_GENERATE operation is performed above.
- */
- if (inst->DTMFInst.lastDtmfSample - inst->curPosition > 0)
- {
- /* special operation for transition from "DTMF only" to "DTMF overdub" */
- outDataIndex
- = WEBRTC_SPL_MIN(inst->DTMFInst.lastDtmfSample - inst->curPosition,
- inst->timestampsPerCall);
- overdubLen = inst->timestampsPerCall - outDataIndex;
- }
-
- dtmfLen = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
- &pw16_outData[outDataIndex], inst->fs, overdubLen);
- if (dtmfLen < 0)
- {
- /* error occurred */
- return_value = dtmfLen;
- }
- inst->DTMFInst.reinit = 0;
-#else
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return FAULTY_INSTRUCTION;
-#endif
- }
-
- /*
- * Shuffle speech buffer to allow more data. Move data from pw16_NetEqAlgorithm_buffer
- * to speechBuffer.
- */
- if (instr != DSP_INSTR_EXPAND)
- {
- w16_tmp1 = WEBRTC_SPL_MIN(inst->endPosition, len);
- WEBRTC_SPL_MEMMOVE_W16(inst->speechBuffer, inst->speechBuffer + w16_tmp1,
- (inst->endPosition-w16_tmp1));
- WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-w16_tmp1],
- &pw16_NetEqAlgorithm_buffer[len-w16_tmp1], w16_tmp1);
-#ifdef NETEQ_ATEVENT_DECODE
- /* Update index to end of DTMF data in speech buffer */
- if (instr == DSP_INSTR_DTMF_GENERATE)
- {
- /* We have written DTMF data to the end of speech buffer */
- inst->DTMFInst.lastDtmfSample = inst->endPosition;
- }
- else if (inst->DTMFInst.lastDtmfSample > 0)
- {
- /* The end of DTMF data in speech buffer has been shuffled */
- inst->DTMFInst.lastDtmfSample -= w16_tmp1;
- }
-#endif
- /*
- * Update the BGN history if last operation was not expand (nor Merge, Accelerate
- * or Pre-emptive expand, to save complexity).
- */
- if ((inst->w16_mode != MODE_EXPAND) && (inst->w16_mode != MODE_MERGE)
- && (inst->w16_mode != MODE_SUCCESS_ACCELERATE) && (inst->w16_mode
- != MODE_LOWEN_ACCELERATE) && (inst->w16_mode != MODE_SUCCESS_PREEMPTIVE)
- && (inst->w16_mode != MODE_LOWEN_PREEMPTIVE) && (inst->w16_mode
- != MODE_FADE_TO_BGN) && (inst->w16_mode != MODE_DTMF) && (!BGNonly))
- {
- WebRtcNetEQ_BGNUpdate(inst
-#ifdef SCRATCH
- , pw16_scratchPtr + SCRATCH_NETEQ_BGN_UPDATE
-#endif
- );
- }
- }
- else /* instr == DSP_INSTR_EXPAND */
- {
- /* Nothing should be done since data is already copied to output. */
- }
-
- inst->curPosition -= len;
-
- /*
- * Extra protection in case something should go totally wrong in terms of sizes...
- * If everything is ok this should NEVER happen.
- */
- if (inst->curPosition < -inst->timestampsPerCall)
- {
- inst->curPosition = -inst->timestampsPerCall;
- }
-
- if ((instr != DSP_INSTR_EXPAND) && (instr != DSP_INSTR_MERGE) && (instr
- != DSP_INSTR_FADE_TO_BGN))
- {
- /* Reset concealed TS parameter if it does not seem to have been flushed */
- if (inst->w16_concealedTS > inst->timestampsPerCall)
- {
- inst->w16_concealedTS = 0;
- }
- }
-
- /*
- * Double-check that we actually have 10 ms to play. If we haven't, there has been a
- * serious error.The decoder might have returned way too few samples
- */
- if (!DataEnough)
- {
- /* This should not happen. Set outdata to zeros, and return error. */
- WebRtcSpl_MemSetW16(pw16_outData, 0, inst->timestampsPerCall);
- *pw16_len = inst->timestampsPerCall;
- inst->w16_mode = MODE_ERROR;
- dspInfo->lastMode = MODE_ERROR;
- return RECOUT_ERROR_SAMPLEUNDERRUN;
- }
-
- /*
- * Update Videosync timestamp (this special timestamp is needed since the endTimestamp
- * stops during CNG and Expand periods.
- */
- if ((inst->w16_mode != MODE_EXPAND) && (inst->w16_mode != MODE_RFC3389CNG))
- {
- uint32_t uw32_tmpTS;
- uw32_tmpTS = inst->endTimestamp - (inst->endPosition - inst->curPosition);
- if ((int32_t) (uw32_tmpTS - inst->videoSyncTimestamp) > 0)
- {
- inst->videoSyncTimestamp = uw32_tmpTS;
- }
- }
- else
- {
- inst->videoSyncTimestamp += inst->timestampsPerCall;
- }
-
- /* After this, regardless of what has happened, deliver 10 ms of future data */
- inst->curPosition += inst->timestampsPerCall;
- *pw16_len = inst->timestampsPerCall;
-
- /* Remember if BGNonly was used */
- if (BGNonly)
- {
- inst->w16_mode |= MODE_BGN_ONLY;
- }
-
- return return_value;
-}
-
-#undef SCRATCH_ALGORITHM_BUFFER
-#undef SCRATCH_NETEQ_NORMAL
-#undef SCRATCH_NETEQ_MERGE
-#undef SCRATCH_NETEQ_BGN_UPDATE
-#undef SCRATCH_NETEQ_EXPAND
-#undef SCRATCH_DSP_INFO
-#undef SCRATCH_NETEQ_ACCELERATE
-#undef SIZE_SCRATCH_BUFFER
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.c
deleted file mode 100644
index d1ce934bcfb..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.c
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of RTCP statistics reporting.
- */
-
-#include "rtcp.h"
-
-#include <string.h>
-
-#include "signal_processing_library.h"
-
-int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo)
-{
- /*
- * Initialize everything to zero and then set the start values for the RTP packet stream.
- */
- WebRtcSpl_MemSetW16((int16_t*) RTCP_inst, 0,
- sizeof(WebRtcNetEQ_RTCP_t) / sizeof(int16_t));
- RTCP_inst->base_seq = uw16_seqNo;
- RTCP_inst->max_seq = uw16_seqNo;
- return 0;
-}
-
-int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo,
- uint32_t uw32_timeStamp, uint32_t uw32_recTime)
-{
- int16_t w16_SeqDiff;
- int32_t w32_TimeDiff;
- int32_t w32_JitterDiff;
-
- /*
- * Update number of received packets, and largest packet number received.
- */
- RTCP_inst->received++;
- w16_SeqDiff = uw16_seqNo - RTCP_inst->max_seq;
- if (w16_SeqDiff >= 0)
- {
- if (uw16_seqNo < RTCP_inst->max_seq)
- {
- /* Wrap around detected */
- RTCP_inst->cycles++;
- }
- RTCP_inst->max_seq = uw16_seqNo;
- }
-
- /* Calculate Jitter, and update previous timestamps */
- /* Note that the value in RTCP_inst->jitter is in Q4. */
- if (RTCP_inst->received > 1)
- {
- w32_TimeDiff = (uw32_recTime - (uw32_timeStamp - RTCP_inst->transit));
- w32_TimeDiff = WEBRTC_SPL_ABS_W32(w32_TimeDiff);
- w32_JitterDiff = WEBRTC_SPL_LSHIFT_W16(w32_TimeDiff, 4) - RTCP_inst->jitter;
- RTCP_inst->jitter = RTCP_inst->jitter + WEBRTC_SPL_RSHIFT_W32((w32_JitterDiff + 8), 4);
- }
- RTCP_inst->transit = (uw32_timeStamp - uw32_recTime);
- return 0;
-}
-
-int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
- uint16_t *puw16_fraction_lost,
- uint32_t *puw32_cum_lost, uint32_t *puw32_ext_max,
- uint32_t *puw32_jitter, int16_t doNotReset)
-{
- uint32_t uw32_exp_nr, uw32_exp_interval, uw32_rec_interval;
- int32_t w32_lost;
-
- /* Extended highest sequence number received */
- *puw32_ext_max
- = (uint32_t) WEBRTC_SPL_LSHIFT_W32((uint32_t)RTCP_inst->cycles, 16)
- + RTCP_inst->max_seq;
-
- /*
- * Calculate expected number of packets and compare it to the number of packets that
- * were actually received => the cumulative number of packets lost can be extracted.
- */
- uw32_exp_nr = *puw32_ext_max - RTCP_inst->base_seq + 1;
- if (RTCP_inst->received == 0)
- {
- /* no packets received, assume none lost */
- *puw32_cum_lost = 0;
- }
- else if (uw32_exp_nr > RTCP_inst->received)
- {
- *puw32_cum_lost = uw32_exp_nr - RTCP_inst->received;
- if (*puw32_cum_lost > (uint32_t) 0xFFFFFF)
- {
- *puw32_cum_lost = 0xFFFFFF;
- }
- }
- else
- {
- *puw32_cum_lost = 0;
- }
-
- /* Fraction lost (Since last report) */
- uw32_exp_interval = uw32_exp_nr - RTCP_inst->exp_prior;
- if (!doNotReset)
- {
- RTCP_inst->exp_prior = uw32_exp_nr;
- }
- uw32_rec_interval = RTCP_inst->received - RTCP_inst->rec_prior;
- if (!doNotReset)
- {
- RTCP_inst->rec_prior = RTCP_inst->received;
- }
- w32_lost = (int32_t) (uw32_exp_interval - uw32_rec_interval);
- if (uw32_exp_interval == 0 || w32_lost <= 0 || RTCP_inst->received == 0)
- {
- *puw16_fraction_lost = 0;
- }
- else
- {
- *puw16_fraction_lost = (uint16_t) (WEBRTC_SPL_LSHIFT_W32(w32_lost, 8)
- / uw32_exp_interval);
- }
- if (*puw16_fraction_lost > 0xFF)
- {
- *puw16_fraction_lost = 0xFF;
- }
-
- /* Inter-arrival jitter */
- *puw32_jitter = (RTCP_inst->jitter) >> 4; /* scaling from Q4 */
- return 0;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.cc
index f9dcf449198..cf8e0280bb2 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.cc
@@ -8,11 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/rtcp.h"
+#include "webrtc/modules/audio_coding/neteq/rtcp.h"
-#include <algorithm>
#include <string.h>
+#include <algorithm>
+
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/interface/module_common_types.h"
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.h
index 5e066eb38f2..2a765efa588 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtcp.h
@@ -8,95 +8,51 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-/*
- * RTCP statistics reporting.
- */
-
-#ifndef RTCP_H
-#define RTCP_H
-
-#include "typedefs.h"
-
-typedef struct
-{
- uint16_t cycles; /* The number of wrap-arounds for the sequence number */
- uint16_t max_seq; /* The maximum sequence number received
- (starts from 0 again after wrap around) */
- uint16_t base_seq; /* The sequence number of the first packet that arrived */
- uint32_t received; /* The number of packets that has been received */
- uint32_t rec_prior; /* Number of packets received when last report was generated */
- uint32_t exp_prior; /* Number of packets that should have been received if no
- packets were lost. Stored value from last report. */
- uint32_t jitter; /* Jitter statistics at this instance (calculated according to RFC) */
- int32_t transit; /* Clock difference for previous packet (RTPtimestamp - LOCALtime_rec) */
-} WebRtcNetEQ_RTCP_t;
-
-/****************************************************************************
- * WebRtcNetEQ_RTCPInit(...)
- *
- * This function calculates the parameters that are needed for the RTCP
- * report.
- *
- * Input:
- * - RTCP_inst : RTCP instance, that contains information about the
- * packets that have been received etc.
- * - seqNo : Packet number of the first received frame.
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo);
-
-/****************************************************************************
- * WebRtcNetEQ_RTCPUpdate(...)
- *
- * This function calculates the parameters that are needed for the RTCP
- * report.
- *
- * Input:
- * - RTCP_inst : RTCP instance, that contains information about the
- * packets that have been received etc.
- * - seqNo : Packet number of the first received frame.
- * - timeStamp : Time stamp from the RTP header.
- * - recTime : Time (in RTP timestamps) when this packet was received.
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo,
- uint32_t uw32_timeStamp, uint32_t uw32_recTime);
-
-/****************************************************************************
- * WebRtcNetEQ_RTCPGetStats(...)
- *
- * This function calculates the parameters that are needed for the RTCP
- * report.
- *
- * Input:
- * - RTCP_inst : RTCP instance, that contains information about the
- * packets that have been received etc.
- * - doNotReset : If non-zero, the fraction lost statistics will not
- * be reset.
- *
- * Output:
- * - RTCP_inst : Updated RTCP information (some statistics are
- * reset when generating this report)
- * - fraction_lost : Number of lost RTP packets divided by the number of
- * expected packets, since the last RTCP Report.
- * - cum_lost : Cumulative number of lost packets during this
- * session.
- * - ext_max : Extended highest sequence number received.
- * - jitter : Inter-arrival jitter.
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
- uint16_t *puw16_fraction_lost,
- uint32_t *puw32_cum_lost, uint32_t *puw32_ext_max,
- uint32_t *puw32_jitter, int16_t doNotReset);
-
-#endif
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_RTCP_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_RTCP_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Forward declaration.
+struct RTPHeader;
+
+class Rtcp {
+ public:
+ Rtcp() {
+ Init(0);
+ }
+
+ ~Rtcp() {}
+
+ // Resets the RTCP statistics, and sets the first received sequence number.
+ void Init(uint16_t start_sequence_number);
+
+ // Updates the RTCP statistics with a new received packet.
+ void Update(const RTPHeader& rtp_header, uint32_t receive_timestamp);
+
+ // Returns the current RTCP statistics. If |no_reset| is true, the statistics
+ // are not reset, otherwise they are.
+ void GetStatistics(bool no_reset, RtcpStatistics* stats);
+
+ private:
+ uint16_t cycles_; // The number of wrap-arounds for the sequence number.
+ uint16_t max_seq_no_; // The maximum sequence number received. Starts over
+ // from 0 after wrap-around.
+ uint16_t base_seq_no_; // The sequence number of the first received packet.
+ uint32_t received_packets_; // The number of packets that have been received.
+ uint32_t received_packets_prior_; // Number of packets received when last
+ // report was generated.
+ uint32_t expected_prior_; // Expected number of packets, at the time of the
+ // last report.
+ uint32_t jitter_; // Current jitter value.
+ int32_t transit_; // Clock difference for previous packet.
+
+ DISALLOW_COPY_AND_ASSIGN(Rtcp);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_RTCP_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.c
deleted file mode 100644
index 6ab5944b5aa..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.c
+++ /dev/null
@@ -1,240 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * RTP related functions.
- */
-
-#include "rtp.h"
-
-#include "typedefs.h" /* to define endianness */
-
-#include "neteq_error_codes.h"
-
-int WebRtcNetEQ_RTPPayloadInfo(int16_t* pw16_Datagram, int i_DatagramLen,
- RTPPacket_t* RTPheader)
-{
- int i_P, i_X, i_CC, i_startPosition;
- int i_IPver;
- int i_extlength = -1; /* Default value is there is no extension */
- int i_padlength = 0; /* Default value if there is no padding */
-
- if (i_DatagramLen < 12)
- {
- return RTP_TOO_SHORT_PACKET;
- }
-
-#ifdef WEBRTC_ARCH_BIG_ENDIAN
- i_IPver = (((uint16_t) (pw16_Datagram[0] & 0xC000)) >> 14); /* Extract the version */
- i_P = (((uint16_t) (pw16_Datagram[0] & 0x2000)) >> 13); /* Extract the P bit */
- i_X = (((uint16_t) (pw16_Datagram[0] & 0x1000)) >> 12); /* Extract the X bit */
- i_CC = ((uint16_t) (pw16_Datagram[0] >> 8) & 0xF); /* Get the CC number */
- RTPheader->payloadType = pw16_Datagram[0] & 0x7F; /* Get the coder type */
- RTPheader->seqNumber = pw16_Datagram[1]; /* Get the sequence number */
- RTPheader->timeStamp = ((((uint32_t) ((uint16_t) pw16_Datagram[2])) << 16)
- | (uint16_t) (pw16_Datagram[3])); /* Get timestamp */
- RTPheader->ssrc = (((uint32_t) pw16_Datagram[4]) << 16)
- + (((uint32_t) pw16_Datagram[5])); /* Get the SSRC */
-
- if (i_X == 1)
- {
- /* Extension header exists. Find out how many int32_t it consists of. */
- i_extlength = pw16_Datagram[7 + 2 * i_CC];
- }
- if (i_P == 1)
- {
- /* Padding exists. Find out how many bytes the padding consists of. */
- if (i_DatagramLen & 0x1)
- {
- /* odd number of bytes => last byte in higher byte */
- i_padlength = (((uint16_t) pw16_Datagram[i_DatagramLen >> 1]) >> 8);
- }
- else
- {
- /* even number of bytes => last byte in lower byte */
- i_padlength = ((pw16_Datagram[(i_DatagramLen >> 1) - 1]) & 0xFF);
- }
- }
-#else /* WEBRTC_ARCH_LITTLE_ENDIAN */
- i_IPver = (((uint16_t) (pw16_Datagram[0] & 0xC0)) >> 6); /* Extract the IP version */
- i_P = (((uint16_t) (pw16_Datagram[0] & 0x20)) >> 5); /* Extract the P bit */
- i_X = (((uint16_t) (pw16_Datagram[0] & 0x10)) >> 4); /* Extract the X bit */
- i_CC = (uint16_t) (pw16_Datagram[0] & 0xF); /* Get the CC number */
- RTPheader->payloadType = (pw16_Datagram[0] >> 8) & 0x7F; /* Get the coder type */
- RTPheader->seqNumber = (((((uint16_t) pw16_Datagram[1]) >> 8) & 0xFF)
- | (((uint16_t) (pw16_Datagram[1] & 0xFF)) << 8)); /* Get the packet number */
- RTPheader->timeStamp = ((((uint16_t) pw16_Datagram[2]) & 0xFF) << 24)
- | ((((uint16_t) pw16_Datagram[2]) & 0xFF00) << 8)
- | ((((uint16_t) pw16_Datagram[3]) >> 8) & 0xFF)
- | ((((uint16_t) pw16_Datagram[3]) & 0xFF) << 8); /* Get timestamp */
- RTPheader->ssrc = ((((uint16_t) pw16_Datagram[4]) & 0xFF) << 24)
- | ((((uint16_t) pw16_Datagram[4]) & 0xFF00) << 8)
- | ((((uint16_t) pw16_Datagram[5]) >> 8) & 0xFF)
- | ((((uint16_t) pw16_Datagram[5]) & 0xFF) << 8); /* Get the SSRC */
-
- if (i_X == 1)
- {
- /* Extension header exists. Find out how many int32_t it consists of. */
- i_extlength = (((((uint16_t) pw16_Datagram[7 + 2 * i_CC]) >> 8) & 0xFF)
- | (((uint16_t) (pw16_Datagram[7 + 2 * i_CC] & 0xFF)) << 8));
- }
- if (i_P == 1)
- {
- /* Padding exists. Find out how many bytes the padding consists of. */
- if (i_DatagramLen & 0x1)
- {
- /* odd number of bytes => last byte in higher byte */
- i_padlength = (pw16_Datagram[i_DatagramLen >> 1] & 0xFF);
- }
- else
- {
- /* even number of bytes => last byte in lower byte */
- i_padlength = (((uint16_t) pw16_Datagram[(i_DatagramLen >> 1) - 1]) >> 8);
- }
- }
-#endif
-
- i_startPosition = 12 + 4 * (i_extlength + 1) + 4 * i_CC;
- RTPheader->payload = &pw16_Datagram[i_startPosition >> 1];
- RTPheader->payloadLen = i_DatagramLen - i_startPosition - i_padlength;
- RTPheader->starts_byte1 = 0;
-
- if ((i_IPver != 2) || (RTPheader->payloadLen <= 0) || (RTPheader->payloadLen >= 16000)
- || (i_startPosition < 12) || (i_startPosition > i_DatagramLen))
- {
- return RTP_CORRUPT_PACKET;
- }
-
- return 0;
-}
-
-#ifdef NETEQ_RED_CODEC
-
-int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads,
- int *i_No_Of_Payloads)
-{
- const int16_t *pw16_data = RTPheader[0]->payload; /* Pointer to the data */
- uint16_t uw16_offsetTimeStamp = 65535, uw16_secondPayload = 65535;
- int i_blockLength, i_k;
- int i_discardedBlockLength = 0;
- int singlePayload = 0;
-
-#ifdef WEBRTC_ARCH_BIG_ENDIAN
- if ((pw16_data[0] & 0x8000) == 0)
- {
- /* Only one payload in this packet*/
- singlePayload = 1;
- /* set the blocklength to -4 to deduce the non-existent 4-byte RED header */
- i_blockLength = -4;
- RTPheader[0]->payloadType = ((((uint16_t)pw16_data[0]) & 0x7F00) >> 8);
- }
- else
- {
- /* Discard all but the two last payloads. */
- while (((pw16_data[2] & 0x8000) != 0) &&
- (pw16_data<((RTPheader[0]->payload)+((RTPheader[0]->payloadLen+1)>>1))))
- {
- i_discardedBlockLength += (4+(((uint16_t)pw16_data[1]) & 0x3FF));
- pw16_data+=2;
- }
- if (pw16_data>=(RTPheader[0]->payload+((RTPheader[0]->payloadLen+1)>>1)))
- {
- return RED_SPLIT_ERROR2; /* Error, we are outside the packet */
- }
- singlePayload = 0; /* the packet contains more than one payload */
- uw16_secondPayload = ((((uint16_t)pw16_data[0]) & 0x7F00) >> 8);
- RTPheader[0]->payloadType = ((((uint16_t)pw16_data[2]) & 0x7F00) >> 8);
- uw16_offsetTimeStamp = ((((uint16_t)pw16_data[0]) & 0xFF) << 6) +
- ((((uint16_t)pw16_data[1]) & 0xFC00) >> 10);
- i_blockLength = (((uint16_t)pw16_data[1]) & 0x3FF);
- }
-#else /* WEBRTC_ARCH_LITTLE_ENDIAN */
- if ((pw16_data[0] & 0x80) == 0)
- {
- /* Only one payload in this packet */
- singlePayload = 1;
- /* set the blocklength to -4 to deduce the non-existent 4-byte RED header */
- i_blockLength = -4;
- RTPheader[0]->payloadType = (((uint16_t) pw16_data[0]) & 0x7F);
- }
- else
- {
- /* Discard all but the two last payloads. */
- while (((pw16_data[2] & 0x80) != 0) && (pw16_data < ((RTPheader[0]->payload)
- + ((RTPheader[0]->payloadLen + 1) >> 1))))
- {
- i_discardedBlockLength += (4 + ((((uint16_t) pw16_data[1]) & 0x3) << 8)
- + ((((uint16_t) pw16_data[1]) & 0xFF00) >> 8));
- pw16_data += 2;
- }
- if (pw16_data >= (RTPheader[0]->payload + ((RTPheader[0]->payloadLen + 1) >> 1)))
- {
- return RED_SPLIT_ERROR2; /* Error, we are outside the packet */;
- }
- singlePayload = 0; /* the packet contains more than one payload */
- uw16_secondPayload = (((uint16_t) pw16_data[0]) & 0x7F);
- RTPheader[0]->payloadType = (((uint16_t) pw16_data[2]) & 0x7F);
- uw16_offsetTimeStamp = ((((uint16_t) pw16_data[0]) & 0xFF00) >> 2)
- + ((((uint16_t) pw16_data[1]) & 0xFC) >> 2);
- i_blockLength = ((((uint16_t) pw16_data[1]) & 0x3) << 8)
- + ((((uint16_t) pw16_data[1]) & 0xFF00) >> 8);
- }
-#endif
-
- if (i_MaximumPayloads < 2 || singlePayload == 1)
- {
- /* Reject the redundancy; or no redundant payload present. */
- for (i_k = 1; i_k < i_MaximumPayloads; i_k++)
- {
- RTPheader[i_k]->payloadType = -1;
- RTPheader[i_k]->payloadLen = 0;
- }
-
- /* update the pointer for the main data */
- pw16_data = &pw16_data[(5 + i_blockLength) >> 1];
- RTPheader[0]->starts_byte1 = (5 + i_blockLength) & 0x1;
- RTPheader[0]->payloadLen = RTPheader[0]->payloadLen - (i_blockLength + 5)
- - i_discardedBlockLength;
- RTPheader[0]->payload = pw16_data;
-
- *i_No_Of_Payloads = 1;
-
- }
- else
- {
- /* Redundancy accepted, put the redundancy in second RTPheader. */
- RTPheader[1]->payloadType = uw16_secondPayload;
- RTPheader[1]->payload = &pw16_data[5 >> 1];
- RTPheader[1]->starts_byte1 = 5 & 0x1;
- RTPheader[1]->seqNumber = RTPheader[0]->seqNumber;
- RTPheader[1]->timeStamp = RTPheader[0]->timeStamp - uw16_offsetTimeStamp;
- RTPheader[1]->ssrc = RTPheader[0]->ssrc;
- RTPheader[1]->payloadLen = i_blockLength;
-
- /* Modify first RTP packet, so that it contains the main data. */
- RTPheader[0]->payload = &pw16_data[(5 + i_blockLength) >> 1];
- RTPheader[0]->starts_byte1 = (5 + i_blockLength) & 0x1;
- RTPheader[0]->payloadLen = RTPheader[0]->payloadLen - (i_blockLength + 5)
- - i_discardedBlockLength;
-
- /* Clear the following payloads. */
- for (i_k = 2; i_k < i_MaximumPayloads; i_k++)
- {
- RTPheader[i_k]->payloadType = -1;
- RTPheader[i_k]->payloadLen = 0;
- }
-
- *i_No_Of_Payloads = 2;
- }
- return 0;
-}
-
-#endif
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.h
deleted file mode 100644
index 4642eaef770..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/rtp.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * RTP data struct and related functions.
- */
-
-#ifndef RTP_H
-#define RTP_H
-
-#include "typedefs.h"
-
-#include "codec_db.h"
-
-typedef struct
-{
- uint16_t seqNumber;
- uint32_t timeStamp;
- uint32_t ssrc;
- int payloadType;
- const int16_t *payload;
- int16_t payloadLen;
- int16_t starts_byte1;
- int16_t rcuPlCntr;
-} RTPPacket_t;
-
-/****************************************************************************
- * WebRtcNetEQ_RTPPayloadInfo(...)
- *
- * Converts a datagram into an RTP header struct.
- *
- * Input:
- * - Datagram : UDP datagram from the network
- * - DatagramLen : Length in bytes of the datagram
- *
- * Output:
- * - RTPheader : Structure with the datagram info
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RTPPayloadInfo(int16_t* pw16_Datagram, int i_DatagramLen,
- RTPPacket_t* RTPheader);
-
-/****************************************************************************
- * WebRtcNetEQ_RedundancySplit(...)
- *
- * Splits a Redundancy RTP struct into two RTP structs. User has to check
- * that it's really the redundancy payload. No such check is done inside this
- * function.
- *
- * Input:
- * - RTPheader : First header holds the whole RTP packet (with the redundancy payload)
- * - MaximumPayloads:
- * The maximum number of RTP payloads that should be
- * extracted (1+maximum_no_of_Redundancies).
- *
- * Output:
- * - RTPheader : First header holds the main RTP data, while 2..N
- * holds the redundancy data.
- * - No_Of
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads,
- int *i_No_Of_Payloads);
-
-#endif
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/set_fs.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/set_fs.c
deleted file mode 100644
index ac974548e61..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/set_fs.c
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Function were the sample rate is set.
- */
-
-#include "mcu.h"
-
-#include "dtmf_buffer.h"
-#include "neteq_error_codes.h"
-
-int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, uint16_t fs)
-{
- int16_t ok = 0;
-
- switch (fs)
- {
- case 8000:
- {
-#ifdef NETEQ_ATEVENT_DECODE
- ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 8000, 560);
-#endif
- inst->timestampsPerCall = inst->millisecondsPerCall * 8;
- break;
- }
-
-#ifdef NETEQ_WIDEBAND
- case 16000:
- {
-#ifdef NETEQ_ATEVENT_DECODE
- ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 16000, 1120);
-#endif
- inst->timestampsPerCall = inst->millisecondsPerCall * 16;
- break;
- }
-#endif
-
-#ifdef NETEQ_32KHZ_WIDEBAND
- case 32000:
- {
-#ifdef NETEQ_ATEVENT_DECODE
- ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 32000, 2240);
-#endif
- inst->timestampsPerCall = inst->millisecondsPerCall * 32;
- break;
- }
-#endif
-
-#ifdef NETEQ_48KHZ_WIDEBAND
- case 48000:
- {
-#ifdef NETEQ_ATEVENT_DECODE
- ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 48000, 3360);
-#endif
- inst->timestampsPerCall = inst->millisecondsPerCall * 48;
- break;
- }
-#endif
-
- default:
- {
- /* Not supported yet */
- return CODEC_DB_UNSUPPORTED_FS;
- }
- } /* end switch */
-
- inst->fs = fs;
-
- return ok;
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/signal_mcu.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/signal_mcu.c
deleted file mode 100644
index b795ec30e38..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/signal_mcu.c
+++ /dev/null
@@ -1,820 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Signal the MCU that data is available and ask for a RecOut decision.
- */
-
-#include "mcu.h"
-
-#include <string.h>
-
-#include "signal_processing_library.h"
-
-#include "automode.h"
-#include "dtmf_buffer.h"
-#include "mcu_dsp_common.h"
-#include "neteq_error_codes.h"
-
-#ifdef NETEQ_DELAY_LOGGING
-#include "delay_logging.h"
-#include <stdio.h>
-
-extern FILE *delay_fid2; /* file pointer to delay log file */
-#endif
-
-
-/*
- * Update the frame size, if we can.
- */
-static int WebRtcNetEQ_UpdatePackSizeSamples(MCUInst_t* inst, int buffer_pos,
- int payload_type,
- int pack_size_samples) {
- if (buffer_pos >= 0) {
- int codec_pos;
- codec_pos = WebRtcNetEQ_DbGetCodec(&inst->codec_DB_inst, payload_type);
- if (codec_pos >= 0) {
- codec_pos = inst->codec_DB_inst.position[codec_pos];
- if (codec_pos >= 0) {
- int temp_packet_size_samples = WebRtcNetEQ_PacketBufferGetPacketSize(
- &inst->PacketBuffer_inst, buffer_pos, &inst->codec_DB_inst,
- codec_pos, pack_size_samples, inst->av_sync);
- if (temp_packet_size_samples > 0)
- return temp_packet_size_samples;
- return pack_size_samples;
- }
- }
- }
- return pack_size_samples;
-}
-
-/*
- * Signals the MCU that DSP status data is available.
- */
-int WebRtcNetEQ_SignalMcu(MCUInst_t *inst)
-{
-
- int i_bufferpos, i_res;
- uint16_t uw16_instr;
- DSP2MCU_info_t dspInfo;
- int16_t *blockPtr, blockLen;
- uint32_t uw32_availableTS;
- RTPPacket_t temp_pkt;
- int32_t w32_bufsize, w32_tmp;
- int16_t payloadType = -1;
- int16_t wantedNoOfTimeStamps;
- int32_t totalTS;
- int16_t oldPT, latePacketExist = 0;
- uint32_t oldTS, prevTS, uw32_tmp;
- uint16_t prevSeqNo;
- int16_t nextSeqNoAvail;
- int16_t fs_mult, w16_tmp;
- int16_t lastModeBGNonly = 0;
-#ifdef NETEQ_DELAY_LOGGING
- int temp_var;
-#endif
- int playDtmf = 0;
-
- fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
-
- /* Increment counter since last statistics report */
- inst->lastReportTS += inst->timestampsPerCall;
-
- /* Increment waiting time for all packets. */
- WebRtcNetEQ_IncrementWaitingTimes(&inst->PacketBuffer_inst);
-
- /* Read info from DSP so we now current status */
-
- WEBRTC_SPL_MEMCPY_W8(&dspInfo,inst->pw16_readAddress,sizeof(DSP2MCU_info_t));
-
- /* Set blockPtr to first payload block */
- blockPtr = &inst->pw16_writeAddress[3];
-
- /* Clear instruction word and number of lost samples (2*int16_t) */
- inst->pw16_writeAddress[0] = 0;
- inst->pw16_writeAddress[1] = 0;
- inst->pw16_writeAddress[2] = 0;
-
- if ((dspInfo.lastMode & MODE_AWAITING_CODEC_PTR) != 0)
- {
- /*
- * Make sure state is adjusted so that a codec update is
- * performed when first packet arrives.
- */
- if (inst->new_codec != 1)
- {
- inst->current_Codec = -1;
- }
- dspInfo.lastMode = (dspInfo.lastMode ^ MODE_AWAITING_CODEC_PTR);
- }
-
-#ifdef NETEQ_STEREO
- if ((dspInfo.lastMode & MODE_MASTER_DTMF_SIGNAL) != 0)
- {
- playDtmf = 1; /* force DTMF decision */
- dspInfo.lastMode = (dspInfo.lastMode ^ MODE_MASTER_DTMF_SIGNAL);
- }
-
- if ((dspInfo.lastMode & MODE_USING_STEREO) != 0)
- {
- if (inst->usingStereo == 0)
- {
- /* stereo mode changed; reset automode instance to re-synchronize statistics */
- WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
- inst->PacketBuffer_inst.maxInsertPositions);
- }
- inst->usingStereo = 1;
- dspInfo.lastMode = (dspInfo.lastMode ^ MODE_USING_STEREO);
- }
- else
- {
- inst->usingStereo = 0;
- }
-#endif
-
- /* detect if BGN_ONLY flag is set in lastMode */
- if ((dspInfo.lastMode & MODE_BGN_ONLY) != 0)
- {
- lastModeBGNonly = 1; /* remember flag */
- dspInfo.lastMode ^= MODE_BGN_ONLY; /* clear the flag */
- }
-
- if ((dspInfo.lastMode == MODE_RFC3389CNG) || (dspInfo.lastMode == MODE_CODEC_INTERNAL_CNG)
- || (dspInfo.lastMode == MODE_EXPAND))
- {
- /*
- * If last mode was CNG (or Expand, since this could be covering up for a lost CNG
- * packet), increase the CNGplayedTS counter.
- */
- inst->BufferStat_inst.uw32_CNGplayedTS += inst->timestampsPerCall;
-
- if (dspInfo.lastMode == MODE_RFC3389CNG)
- {
- /* remember that RFC3389CNG is on (needed if CNG is interrupted by DTMF) */
- inst->BufferStat_inst.w16_cngOn = CNG_RFC3389_ON;
- }
- else if (dspInfo.lastMode == MODE_CODEC_INTERNAL_CNG)
- {
- /* remember that internal CNG is on (needed if CNG is interrupted by DTMF) */
- inst->BufferStat_inst.w16_cngOn = CNG_INTERNAL_ON;
- }
-
- }
-
- /* Update packet size from previously decoded packet */
- if (dspInfo.frameLen > 0)
- {
- inst->PacketBuffer_inst.packSizeSamples = dspInfo.frameLen;
- }
-
- /* Look for late packet (unless codec has changed) */
- if (inst->new_codec != 1)
- {
- if (WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) inst->current_Codec))
- {
- WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
- inst->timeStamp, &uw32_availableTS, &i_bufferpos, 1, &payloadType);
- if ((inst->new_codec != 1) && (inst->timeStamp == uw32_availableTS)
- && (inst->timeStamp < dspInfo.playedOutTS) && (i_bufferpos != -1)
- && (WebRtcNetEQ_DbGetPayload(&(inst->codec_DB_inst),
- (enum WebRtcNetEQDecoder) inst->current_Codec) == payloadType))
- {
- int waitingTime;
- temp_pkt.payload = blockPtr + 1;
- i_res = WebRtcNetEQ_PacketBufferExtract(&inst->PacketBuffer_inst, &temp_pkt,
- i_bufferpos, &waitingTime);
- if (i_res < 0)
- { /* error returned */
- return i_res;
- }
- WebRtcNetEQ_StoreWaitingTime(inst, waitingTime);
- *blockPtr = temp_pkt.payloadLen;
- /* set the flag if this is a redundant payload */
- if (temp_pkt.rcuPlCntr > 0)
- {
- *blockPtr = (*blockPtr) | (DSP_CODEC_RED_FLAG);
- }
- blockPtr += ((temp_pkt.payloadLen + 1) >> 1) + 1;
-
- /*
- * Close the data with a zero size block, in case we will not write any
- * more data.
- */
- *blockPtr = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff)
- | DSP_CODEC_ADD_LATE_PKT;
- latePacketExist = 1;
- }
- }
- }
-
- i_res = WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
- dspInfo.playedOutTS, &uw32_availableTS, &i_bufferpos, (inst->new_codec == 0),
- &payloadType);
- if (i_res < 0)
- { /* error returned */
- return i_res;
- }
-
- if (inst->BufferStat_inst.w16_cngOn == CNG_RFC3389_ON)
- {
- /*
- * Because of timestamp peculiarities, we have to "manually" disallow using a CNG
- * packet with the same timestamp as the one that was last played. This can happen
- * when using redundancy and will cause the timing to shift.
- */
- while (i_bufferpos != -1 && WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst,
- payloadType) && dspInfo.playedOutTS >= uw32_availableTS)
- {
-
- /* Don't use this packet, discard it */
- inst->PacketBuffer_inst.payloadType[i_bufferpos] = -1;
- inst->PacketBuffer_inst.payloadLengthBytes[i_bufferpos] = 0;
- inst->PacketBuffer_inst.numPacketsInBuffer--;
-
- /* Check buffer again */
- WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
- dspInfo.playedOutTS, &uw32_availableTS, &i_bufferpos, (inst->new_codec == 0),
- &payloadType);
- }
- }
-
- /* Check packet buffer */
- w32_bufsize = WebRtcNetEQ_PacketBufferGetSize(&inst->PacketBuffer_inst,
- &inst->codec_DB_inst, inst->av_sync);
-
- if (dspInfo.lastMode == MODE_SUCCESS_ACCELERATE || dspInfo.lastMode
- == MODE_LOWEN_ACCELERATE || dspInfo.lastMode == MODE_SUCCESS_PREEMPTIVE
- || dspInfo.lastMode == MODE_LOWEN_PREEMPTIVE)
- {
- /* Subtract (dspInfo.samplesLeft + inst->timestampsPerCall) from sampleMemory */
- inst->BufferStat_inst.Automode_inst.sampleMemory -= dspInfo.samplesLeft
- + inst->timestampsPerCall;
- }
-
- /* calculate total current buffer size (in ms*8), including sync buffer */
- w32_bufsize = WebRtcSpl_DivW32W16((w32_bufsize + dspInfo.samplesLeft), fs_mult);
-
-#ifdef NETEQ_ATEVENT_DECODE
- /* DTMF data will affect the decision */
- if (WebRtcNetEQ_DtmfDecode(&inst->DTMF_inst, blockPtr + 1, blockPtr + 2,
- dspInfo.playedOutTS + inst->BufferStat_inst.uw32_CNGplayedTS) > 0)
- {
- playDtmf = 1;
-
- /* Flag DTMF payload */
- inst->pw16_writeAddress[0] = inst->pw16_writeAddress[0] | DSP_DTMF_PAYLOAD;
-
- /* Block Length in bytes */
- blockPtr[0] = 4;
- /* Advance to next payload position */
- blockPtr += 3;
- }
-#endif
-
- /* Update the frame size, if we can. */
- inst->PacketBuffer_inst.packSizeSamples =
- WebRtcNetEQ_UpdatePackSizeSamples(inst, i_bufferpos, payloadType,
- inst->PacketBuffer_inst.packSizeSamples);
- /* Update statistics and make decision */
- uw16_instr = WebRtcNetEQ_BufstatsDecision(&inst->BufferStat_inst,
- inst->PacketBuffer_inst.packSizeSamples, w32_bufsize, dspInfo.playedOutTS,
- uw32_availableTS, i_bufferpos == -1,
- WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst, payloadType), dspInfo.lastMode,
- inst->NetEqPlayoutMode, inst->timestampsPerCall, inst->NoOfExpandCalls, fs_mult,
- lastModeBGNonly, playDtmf);
-
- /* Check if time to reset loss counter */
- if (inst->lastReportTS > WEBRTC_SPL_UMUL(inst->fs, MAX_LOSS_REPORT_PERIOD))
- {
- /* reset loss counter */
- WebRtcNetEQ_ResetMcuInCallStats(inst);
- }
-
- /* Check sync buffer size */
- if ((dspInfo.samplesLeft >= inst->timestampsPerCall) && (uw16_instr
- != BUFSTATS_DO_ACCELERATE) && (uw16_instr != BUFSTATS_DO_MERGE) && (uw16_instr
- != BUFSTATS_DO_PREEMPTIVE_EXPAND))
- {
- *blockPtr = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff) | DSP_INSTR_NORMAL;
- return 0;
- }
-
- if (uw16_instr == BUFSTATS_DO_EXPAND)
- {
- inst->NoOfExpandCalls++;
- }
- else
- {
- /* reset counter */
- inst->NoOfExpandCalls = 0;
- }
-
- /* New codec or big change in packet number? */
- if ((inst->new_codec) || (uw16_instr == BUFSTAT_REINIT))
- {
- CodecFuncInst_t cinst;
-
- /* Clear other instructions */
- blockPtr = &inst->pw16_writeAddress[3];
- /* Clear instruction word */
- inst->pw16_writeAddress[0] = 0;
-
- inst->timeStamp = uw32_availableTS;
- dspInfo.playedOutTS = uw32_availableTS;
- if (inst->current_Codec != -1)
- {
- i_res = WebRtcNetEQ_DbGetPtrs(&inst->codec_DB_inst,
- (enum WebRtcNetEQDecoder) inst->current_Codec, &cinst);
- if (i_res < 0)
- { /* error returned */
- return i_res;
- }
- }
- else
- {
- /* The main codec has not been initialized yet (first packets are DTMF or CNG). */
- if (WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst, payloadType))
- {
- /* The currently extracted packet is CNG; get CNG fs */
- uint16_t tempFs;
-
- tempFs = WebRtcNetEQ_DbGetSampleRate(&inst->codec_DB_inst, payloadType);
- /* TODO(tlegrand): Remove this limitation once ACM has full
- * 48 kHz support. */
- if (tempFs > 32000)
- {
- inst->fs = 32000;
- }
- else if (tempFs > 0)
- {
- inst->fs = tempFs;
- }
- }
- WebRtcSpl_MemSetW16((int16_t*) &cinst, 0,
- sizeof(CodecFuncInst_t) / sizeof(int16_t));
- cinst.codec_fs = inst->fs;
- }
- cinst.timeStamp = inst->timeStamp;
- blockLen = (sizeof(CodecFuncInst_t)) >> (sizeof(int16_t) - 1); /* in Word16 */
- *blockPtr = blockLen * 2;
- blockPtr++;
- WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst,sizeof(CodecFuncInst_t));
- blockPtr += blockLen;
- inst->new_codec = 0;
-
- /* Reinitialize the MCU fs */
- i_res = WebRtcNetEQ_McuSetFs(inst, cinst.codec_fs);
- if (i_res < 0)
- { /* error returned */
- return i_res;
- }
-
- /* Set the packet size by guessing */
- inst->PacketBuffer_inst.packSizeSamples =
- WebRtcNetEQ_UpdatePackSizeSamples(inst, i_bufferpos, payloadType,
- inst->timestampsPerCall * 3);
-
- WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
- inst->PacketBuffer_inst.maxInsertPositions);
-
-#ifdef NETEQ_CNG_CODEC
- /* Also insert CNG state as this might be needed by DSP */
- i_res = WebRtcNetEQ_DbGetPtrs(&inst->codec_DB_inst, kDecoderCNG, &cinst);
- if ((i_res < 0) && (i_res != CODEC_DB_NOT_EXIST1))
- {
- /* other error returned */
- /* (CODEC_DB_NOT_EXIST1 simply indicates that CNG is not used */
- return i_res;
- }
- else
- {
- /* CNG exists */
- blockLen = (sizeof(cinst.codec_state)) >> (sizeof(int16_t) - 1);
- *blockPtr = blockLen * 2;
- blockPtr++;
- WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst.codec_state,sizeof(cinst.codec_state));
- blockPtr += blockLen;
- }
-#endif
-
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff)
- | DSP_CODEC_NEW_CODEC;
-
- if (uw16_instr == BUFSTATS_DO_RFC3389CNG_NOPACKET)
- {
- /*
- * Change decision to CNG packet, since we do have a CNG packet, but it was
- * considered too early to use. Now, use it anyway.
- */
- uw16_instr = BUFSTATS_DO_RFC3389CNG_PACKET;
- }
- else if (uw16_instr != BUFSTATS_DO_RFC3389CNG_PACKET)
- {
- uw16_instr = BUFSTATS_DO_NORMAL;
- }
-
- /* reset loss counter */
- WebRtcNetEQ_ResetMcuInCallStats(inst);
- }
-
- /* Should we just reset the decoder? */
- if (uw16_instr == BUFSTAT_REINIT_DECODER)
- {
- /* Change decision to normal and flag decoder reset */
- uw16_instr = BUFSTATS_DO_NORMAL;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff) | DSP_CODEC_RESET;
- }
-
- /* Expand requires no new packet */
- if (uw16_instr == BUFSTATS_DO_EXPAND)
- {
-
- inst->timeStamp = dspInfo.playedOutTS;
-
- /* Have we got one descriptor left? */
- if (WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) inst->current_Codec)
- && (dspInfo.MD || latePacketExist))
- {
-
- if (dspInfo.lastMode != MODE_ONE_DESCRIPTOR)
- {
- /* this is the first "consecutive" one-descriptor decoding; reset counter */
- inst->one_desc = 0;
- }
- if (inst->one_desc < MAX_ONE_DESC)
- {
- /* use that one descriptor */
- inst->one_desc++; /* increase counter */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_NORMAL_ONE_DESC;
-
- /* decrease counter since we did no Expand */
- inst->NoOfExpandCalls = WEBRTC_SPL_MAX(inst->NoOfExpandCalls - 1, 0);
- return 0;
- }
- else
- {
- /* too many consecutive one-descriptor decodings; do expand instead */
- inst->one_desc = 0; /* reset counter */
- }
-
- }
-
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff) | DSP_INSTR_EXPAND;
- return 0;
- }
-
- /* Merge is not needed if we still have a descriptor */
- if ((uw16_instr == BUFSTATS_DO_MERGE) && (dspInfo.MD != 0))
- {
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_NORMAL_ONE_DESC;
- *blockPtr = 0;
- return 0;
- }
-
- /* Do CNG without trying to extract any packets from buffer */
- if (uw16_instr == BUFSTATS_DO_RFC3389CNG_NOPACKET)
- {
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_RFC3389CNG;
- *blockPtr = 0;
- return 0;
- }
-
- /* Do built-in CNG without extracting any new packets from buffer */
- if (uw16_instr == BUFSTATS_DO_INTERNAL_CNG_NOPACKET)
- {
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_CODEC_INTERNAL_CNG;
- *blockPtr = 0;
- return 0;
- }
-
- /* Do DTMF without extracting any new packets from buffer */
- if (uw16_instr == BUFSTATS_DO_DTMF_ONLY)
- {
- uint32_t timeStampJump = 0;
-
- /* Update timestamp */
- if ((inst->BufferStat_inst.uw32_CNGplayedTS > 0) && (dspInfo.lastMode != MODE_DTMF))
- {
- /* Jump in timestamps if needed */
- timeStampJump = inst->BufferStat_inst.uw32_CNGplayedTS;
- inst->pw16_writeAddress[1] = (uint16_t) (timeStampJump >> 16);
- inst->pw16_writeAddress[2] = (uint16_t) (timeStampJump & 0xFFFF);
- }
-
- inst->timeStamp = dspInfo.playedOutTS + timeStampJump;
-
- inst->BufferStat_inst.uw32_CNGplayedTS = 0;
- inst->NoOfExpandCalls = 0;
-
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DTMF_GENERATE;
- *blockPtr = 0;
- return 0;
- }
-
- if (uw16_instr == BUFSTATS_DO_ACCELERATE)
- {
- /* In order to do a Accelerate we need at least 30 ms of data */
- if (dspInfo.samplesLeft >= (3 * 80 * fs_mult))
- {
- /* Already have enough data, so we do not need to extract any more */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_ACCELERATE;
- *blockPtr = 0;
- inst->BufferStat_inst.Automode_inst.sampleMemory
- = (int32_t) dspInfo.samplesLeft;
- inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
- return 0;
- }
- else if ((dspInfo.samplesLeft >= (1 * 80 * fs_mult))
- && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
- {
- /* Avoid decoding more data as it might overflow playout buffer */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_NORMAL;
- *blockPtr = 0;
- return 0;
- }
- else if ((dspInfo.samplesLeft < (1 * 80 * fs_mult))
- && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
- {
- /* For >= 30ms allow Accelerate with a decoding to avoid overflow in playout buffer */
- wantedNoOfTimeStamps = inst->timestampsPerCall;
- }
- else if (dspInfo.samplesLeft >= (2 * 80 * fs_mult))
- {
- /* We need to decode another 10 ms in order to do an Accelerate */
- wantedNoOfTimeStamps = inst->timestampsPerCall;
- }
- else
- {
- /*
- * Build up decoded data by decoding at least 20 ms of data.
- * Do not perform Accelerate yet, but wait until we only need to do one decoding.
- */
- wantedNoOfTimeStamps = 2 * inst->timestampsPerCall;
- uw16_instr = BUFSTATS_DO_NORMAL;
- }
- }
- else if (uw16_instr == BUFSTATS_DO_PREEMPTIVE_EXPAND)
- {
- /* In order to do a Preemptive Expand we need at least 30 ms of data */
- if (dspInfo.samplesLeft >= (3 * 80 * fs_mult))
- {
- /* Already have enough data, so we do not need to extract any more */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_PREEMPTIVE_EXPAND;
- *blockPtr = 0;
- inst->BufferStat_inst.Automode_inst.sampleMemory
- = (int32_t) dspInfo.samplesLeft;
- inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
- return 0;
- }
- else if ((dspInfo.samplesLeft >= (1 * 80 * fs_mult))
- && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
- {
- /*
- * Avoid decoding more data as it might overflow playout buffer;
- * still try Preemptive Expand though.
- */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_PREEMPTIVE_EXPAND;
- *blockPtr = 0;
- inst->BufferStat_inst.Automode_inst.sampleMemory
- = (int32_t) dspInfo.samplesLeft;
- inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
- return 0;
- }
- else if ((dspInfo.samplesLeft < (1 * 80 * fs_mult))
- && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
- {
- /*
- * For >= 30ms allow Preemptive Expand with a decoding to avoid overflow in
- * playout buffer
- */
- wantedNoOfTimeStamps = inst->timestampsPerCall;
- }
- else if (dspInfo.samplesLeft >= (2 * 80 * fs_mult))
- {
- /* We need to decode another 10 ms in order to do an Preemptive Expand */
- wantedNoOfTimeStamps = inst->timestampsPerCall;
- }
- else
- {
- /*
- * Build up decoded data by decoding at least 20 ms of data,
- * Still try to perform Preemptive Expand.
- */
- wantedNoOfTimeStamps = 2 * inst->timestampsPerCall;
- }
- }
- else
- {
- wantedNoOfTimeStamps = inst->timestampsPerCall;
- }
-
- /* Otherwise get data from buffer, try to get at least 10ms */
- totalTS = 0;
- oldTS = uw32_availableTS;
- if ((i_bufferpos > -1) && (uw16_instr != BUFSTATS_DO_ALTERNATIVE_PLC) && (uw16_instr
- != BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS) && (uw16_instr != BUFSTATS_DO_AUDIO_REPETITION)
- && (uw16_instr != BUFSTATS_DO_AUDIO_REPETITION_INC_TS))
- {
- uw32_tmp = (uw32_availableTS - dspInfo.playedOutTS);
- inst->pw16_writeAddress[1] = (uint16_t) (uw32_tmp >> 16);
- inst->pw16_writeAddress[2] = (uint16_t) (uw32_tmp & 0xFFFF);
- if (inst->BufferStat_inst.w16_cngOn == CNG_OFF)
- {
- /*
- * Adjustment of TS only corresponds to an actual packet loss
- * if comfort noise is not played. If comfort noise was just played,
- * this adjustment of TS is only done to get back in sync with the
- * stream TS; no loss to report.
- */
- inst->lostTS += uw32_tmp;
- }
-
- if (uw16_instr != BUFSTATS_DO_RFC3389CNG_PACKET)
- {
- /* We are about to decode and use a non-CNG packet => CNG period is ended */
- inst->BufferStat_inst.w16_cngOn = CNG_OFF;
- }
-
- /*
- * Reset CNG timestamp as a new packet will be delivered.
- * (Also if CNG packet, since playedOutTS is updated.)
- */
- inst->BufferStat_inst.uw32_CNGplayedTS = 0;
-
- prevSeqNo = inst->PacketBuffer_inst.seqNumber[i_bufferpos];
- prevTS = inst->PacketBuffer_inst.timeStamp[i_bufferpos];
- oldPT = inst->PacketBuffer_inst.payloadType[i_bufferpos];
-
- /* These values are used by NACK module to estimate time-to-play of
- * a missing packet. Occasionally, NetEq might decide to decode more
- * than one packet. Therefore, these values store sequence number and
- * timestamp of the first packet pulled from the packet buffer. In
- * such cases, these values do not exactly represent the sequence number
- * or timestamp associated with a 10ms audio pulled from NetEq. NACK
- * module is designed to compensate for this.
- */
- inst->decoded_packet_sequence_number = prevSeqNo;
- inst->decoded_packet_timestamp = prevTS;
-
- /* clear flag bits */
- inst->pw16_writeAddress[0] = inst->pw16_writeAddress[0] & 0xFF3F;
- do
- {
- int waitingTime;
- inst->timeStamp = uw32_availableTS;
- /* Write directly to shared memory */
- temp_pkt.payload = blockPtr + 1;
- i_res = WebRtcNetEQ_PacketBufferExtract(&inst->PacketBuffer_inst, &temp_pkt,
- i_bufferpos, &waitingTime);
-
- if (i_res < 0)
- {
- /* error returned */
- return i_res;
- }
- WebRtcNetEQ_StoreWaitingTime(inst, waitingTime);
-
-#ifdef NETEQ_DELAY_LOGGING
- temp_var = NETEQ_DELAY_LOGGING_SIGNAL_DECODE;
- if ((fwrite(&temp_var, sizeof(int),
- 1, delay_fid2) != 1) ||
- (fwrite(&temp_pkt.timeStamp, sizeof(uint32_t),
- 1, delay_fid2) != 1) ||
- (fwrite(&dspInfo.samplesLeft, sizeof(uint16_t),
- 1, delay_fid2) != 1)) {
- return -1;
- }
-#endif
-
- *blockPtr = temp_pkt.payloadLen;
- /* set the flag if this is a redundant payload */
- if (temp_pkt.rcuPlCntr > 0)
- {
- *blockPtr = (*blockPtr) | (DSP_CODEC_RED_FLAG);
- }
- blockPtr += ((temp_pkt.payloadLen + 1) >> 1) + 1;
-
- if (i_bufferpos > -1)
- {
- /*
- * Store number of TS extracted (last extracted is assumed to be of
- * packSizeSamples).
- */
- totalTS = uw32_availableTS - oldTS + inst->PacketBuffer_inst.packSizeSamples;
- }
- /* Check what next packet is available */
- WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
- inst->timeStamp, &uw32_availableTS, &i_bufferpos, 0, &payloadType);
-
- nextSeqNoAvail = 0;
- if ((i_bufferpos > -1) && (oldPT
- == inst->PacketBuffer_inst.payloadType[i_bufferpos]))
- {
- w16_tmp = inst->PacketBuffer_inst.seqNumber[i_bufferpos] - prevSeqNo;
- w32_tmp = inst->PacketBuffer_inst.timeStamp[i_bufferpos] - prevTS;
- if ((w16_tmp == 1) || /* Next packet */
- ((w16_tmp == 0) && (w32_tmp == inst->PacketBuffer_inst.packSizeSamples)))
- { /* or packet split into frames */
- nextSeqNoAvail = 1;
- }
- prevSeqNo = inst->PacketBuffer_inst.seqNumber[i_bufferpos];
- }
- /* Update the frame size, if we can. */
- inst->PacketBuffer_inst.packSizeSamples =
- WebRtcNetEQ_UpdatePackSizeSamples(inst, i_bufferpos,
- payloadType, inst->PacketBuffer_inst.packSizeSamples);
- }
- while ((totalTS < wantedNoOfTimeStamps) && (nextSeqNoAvail == 1));
- }
-
- if ((uw16_instr == BUFSTATS_DO_ACCELERATE)
- || (uw16_instr == BUFSTATS_DO_PREEMPTIVE_EXPAND))
- {
- /* Check that we have enough data (30ms) to do the Accelearate */
- if ((totalTS + dspInfo.samplesLeft) < WEBRTC_SPL_MUL(3,inst->timestampsPerCall)
- && (uw16_instr == BUFSTATS_DO_ACCELERATE))
- {
- /* Not enough, do normal operation instead */
- uw16_instr = BUFSTATS_DO_NORMAL;
- }
- else
- {
- inst->BufferStat_inst.Automode_inst.sampleMemory
- = (int32_t) dspInfo.samplesLeft + totalTS;
- inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
- }
- }
-
- /* Close the data with a zero size block */
- *blockPtr = 0;
-
- /* Write data to DSP */
- switch (uw16_instr)
- {
- case BUFSTATS_DO_NORMAL:
- /* Normal with decoding included */
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_NORMAL;
- break;
- case BUFSTATS_DO_ACCELERATE:
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_ACCELERATE;
- break;
- case BUFSTATS_DO_MERGE:
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_MERGE;
- break;
- case BUFSTATS_DO_RFC3389CNG_PACKET:
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_RFC3389CNG;
- break;
- case BUFSTATS_DO_ALTERNATIVE_PLC:
- inst->pw16_writeAddress[1] = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_ALTERNATIVE_PLC;
- break;
- case BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS:
- inst->pw16_writeAddress[1] = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS;
- break;
- case BUFSTATS_DO_AUDIO_REPETITION:
- inst->pw16_writeAddress[1] = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_AUDIO_REPETITION;
- break;
- case BUFSTATS_DO_AUDIO_REPETITION_INC_TS:
- inst->pw16_writeAddress[1] = 0;
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_DO_AUDIO_REPETITION_INC_TS;
- break;
- case BUFSTATS_DO_PREEMPTIVE_EXPAND:
- inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
- | DSP_INSTR_PREEMPTIVE_EXPAND;
- break;
- default:
- return UNKNOWN_BUFSTAT_DECISION;
- }
-
- inst->timeStamp = dspInfo.playedOutTS;
- return 0;
-
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/split_and_insert.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/split_and_insert.c
deleted file mode 100644
index d7f17fdc8d4..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/split_and_insert.c
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Split an RTP payload (if possible and suitable) and insert into packet buffer.
- */
-
-#include "mcu.h"
-
-#include <string.h>
-
-#include "mcu_dsp_common.h"
-#include "neteq_error_codes.h"
-#include "signal_processing_library.h"
-
-int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t* packet,
- PacketBuf_t* Buffer_inst,
- SplitInfo_t* split_inst,
- int16_t* flushed,
- int av_sync)
-{
-
- int i_ok;
- int len;
- int i;
- RTPPacket_t temp_packet;
- int16_t localFlushed = 0;
- const int16_t *pw16_startPayload;
- const int is_sync_rtp = av_sync &&
- WebRtcNetEQ_IsSyncPayload(packet->payload, packet->payloadLen);
- *flushed = 0;
-
- len = packet->payloadLen;
-
- /* Copy to temp packet that can be modified. */
-
- WEBRTC_SPL_MEMCPY_W8(&temp_packet,packet,sizeof(RTPPacket_t));
-
- if (split_inst->deltaBytes == NO_SPLIT ||
- is_sync_rtp) /* Don't split sync RTPs just insert. */
- {
- /* Not splittable codec */
- i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, packet,
- &localFlushed, av_sync);
- *flushed |= localFlushed;
- if (i_ok < 0)
- {
- return PBUFFER_INSERT_ERROR5;
- }
- }
- else if (split_inst->deltaBytes < -10)
- {
- /* G711, PCM16B or G722, use "soft splitting" */
- int split_size = packet->payloadLen;
- int mult = WEBRTC_SPL_ABS_W32(split_inst->deltaBytes) - 10;
-
- /* Find "chunk size" >= 20 ms and < 40 ms
- * split_inst->deltaTime in this case contains the number of bytes per
- * timestamp unit times 2
- */
- while (split_size >= ((80 << split_inst->deltaTime) * mult))
- {
- split_size >>= 1;
- }
-
- /* Make the size an even value. */
- if (split_size > 1)
- {
- split_size >>= 1;
- split_size *= 2;
- }
-
- temp_packet.payloadLen = split_size;
- pw16_startPayload = temp_packet.payload;
- i = 0;
- while (len >= (2 * split_size))
- {
- /* insert every chunk */
- i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet,
- &localFlushed, av_sync);
- *flushed |= localFlushed;
- temp_packet.timeStamp += ((2 * split_size) >> split_inst->deltaTime);
- i++;
- temp_packet.payload = &(pw16_startPayload[(i * split_size) >> 1]);
- temp_packet.starts_byte1 = temp_packet.starts_byte1 ^ (split_size & 0x1);
-
- len -= split_size;
- if (i_ok < 0)
- {
- return PBUFFER_INSERT_ERROR1;
- }
- }
-
- /* Insert the rest */
- temp_packet.payloadLen = len;
- i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet,
- &localFlushed, av_sync);
- *flushed |= localFlushed;
- if (i_ok < 0)
- {
- return PBUFFER_INSERT_ERROR2;
- }
- }
- else
- {
- /* Frame based codec, use hard splitting. */
- i = 0;
- pw16_startPayload = temp_packet.payload;
- while (len >= split_inst->deltaBytes)
- {
-
- temp_packet.payloadLen = split_inst->deltaBytes;
- i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet,
- &localFlushed, av_sync);
- *flushed |= localFlushed;
- i++;
- temp_packet.payload = &(pw16_startPayload[(i * split_inst->deltaBytes) >> 1]);
- temp_packet.timeStamp += split_inst->deltaTime;
- temp_packet.starts_byte1 = temp_packet.starts_byte1 ^ (split_inst->deltaBytes
- & 0x1);
-
- if (i_ok < 0)
- {
- return PBUFFER_INSERT_ERROR3;
- }
- len -= split_inst->deltaBytes;
-
- }
- if (len > 0)
- {
- /* Must be a either an error or a SID frame at the end of the packet. */
- temp_packet.payloadLen = len;
- i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet,
- &localFlushed, av_sync);
- *flushed |= localFlushed;
- if (i_ok < 0)
- {
- return PBUFFER_INSERT_ERROR4;
- }
- }
- }
-
- return 0;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.cc
index b6e9222d475..383f7055549 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.cc
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/statistics_calculator.h"
+#include "webrtc/modules/audio_coding/neteq/statistics_calculator.h"
#include <assert.h>
#include <string.h> // memset
-#include "webrtc/modules/audio_coding/neteq4/decision_logic.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
+#include "webrtc/modules/audio_coding/neteq/decision_logic.h"
+#include "webrtc/modules/audio_coding/neteq/delay_manager.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.h
index 25f8a14bb9a..07ef8536fa8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/statistics_calculator.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/statistics_calculator.h
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_STATISTICS_CALCULATOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_STATISTICS_CALCULATOR_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_STATISTICS_CALCULATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_STATISTICS_CALCULATOR_H_
#include <vector>
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -106,4 +106,4 @@ class StatisticsCalculator {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_STATISTICS_CALCULATOR_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_STATISTICS_CALCULATOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.cc
index 75ee6ece082..d1802e174fc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.cc
@@ -12,7 +12,7 @@
#include <algorithm> // Access to min.
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.h
index e1e5daf1b78..59bd4d87e26 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer.h
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_SYNC_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_SYNC_BUFFER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_SYNC_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_SYNC_BUFFER_H_
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -78,7 +78,8 @@ class SyncBuffer : public AudioMultiVector {
// created.
void Flush();
- const AudioVector& Channel(size_t n) { return *channels_[n]; }
+ const AudioVector& Channel(size_t n) const { return *channels_[n]; }
+ AudioVector& Channel(size_t n) { return *channels_[n]; }
// Accessors and mutators.
size_t next_index() const { return next_index_; }
@@ -97,4 +98,4 @@ class SyncBuffer : public AudioMultiVector {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_SYNC_BUFFER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_SYNC_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer_unittest.cc
index 1aafa22ab88..1a3d0fe781c 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/sync_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/sync_buffer_unittest.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/sync_buffer.h"
+#include "webrtc/modules/audio_coding/neteq/sync_buffer.h"
#include "gtest/gtest.h"
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc
index 5b6b3ba9666..a9228d49889 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.cc
@@ -8,13 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/time_stretch.h"
+#include "webrtc/modules/audio_coding/neteq/time_stretch.h"
#include <algorithm> // min, max
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-#include "webrtc/modules/audio_coding/neteq4/dsp_helper.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+#include "webrtc/modules/audio_coding/neteq/dsp_helper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
@@ -29,7 +29,7 @@ TimeStretch::ReturnCodes TimeStretch::Process(
int fs_mult_120 = fs_mult_ * 120; // Corresponds to 15 ms.
const int16_t* signal;
- scoped_array<int16_t> signal_array;
+ scoped_ptr<int16_t[]> signal_array;
size_t signal_len;
if (num_channels_ == 1) {
signal = input;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.h
index f0f58b83ad9..9396d8ff519 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch.h
@@ -8,14 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIME_STRETCH_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIME_STRETCH_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIME_STRETCH_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIME_STRETCH_H_
#include <assert.h>
#include <string.h> // memset, size_t
-#include "webrtc/modules/audio_coding/neteq4/audio_multi_vector.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/audio_multi_vector.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -108,4 +108,4 @@ class TimeStretch {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIME_STRETCH_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIME_STRETCH_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch_unittest.cc
new file mode 100644
index 00000000000..64789b4d436
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/time_stretch_unittest.cc
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for Accelerate and PreemptiveExpand classes.
+
+#include "webrtc/modules/audio_coding/neteq/accelerate.h"
+#include "webrtc/modules/audio_coding/neteq/preemptive_expand.h"
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/background_noise.h"
+
+namespace webrtc {
+
+TEST(TimeStretch, CreateAndDestroy) {
+ const int kSampleRate = 8000;
+ const size_t kNumChannels = 1;
+ const int kOverlapSamples = 5 * kSampleRate / 8000;
+ BackgroundNoise bgn(kNumChannels);
+ Accelerate accelerate(kSampleRate, kNumChannels, bgn);
+ PreemptiveExpand preemptive_expand(
+ kSampleRate, kNumChannels, bgn, kOverlapSamples);
+}
+
+TEST(TimeStretch, CreateUsingFactory) {
+ const int kSampleRate = 8000;
+ const size_t kNumChannels = 1;
+ const int kOverlapSamples = 5 * kSampleRate / 8000;
+ BackgroundNoise bgn(kNumChannels);
+
+ AccelerateFactory accelerate_factory;
+ Accelerate* accelerate =
+ accelerate_factory.Create(kSampleRate, kNumChannels, bgn);
+ EXPECT_TRUE(accelerate != NULL);
+ delete accelerate;
+
+ PreemptiveExpandFactory preemptive_expand_factory;
+ PreemptiveExpand* preemptive_expand = preemptive_expand_factory.Create(
+ kSampleRate, kNumChannels, bgn, kOverlapSamples);
+ EXPECT_TRUE(preemptive_expand != NULL);
+ delete preemptive_expand;
+}
+
+// TODO(hlundin): Write more tests.
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc
index b2b5b40a3a6..01890136a94 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.cc
@@ -8,10 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h"
+#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/defines.h"
+#include "webrtc/modules/audio_coding/neteq/decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/defines.h"
#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.h
index e165076a5e5..59b8cc7d1dc 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler.h
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIMESTAMP_SCALER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIMESTAMP_SCALER_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIMESTAMP_SCALER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIMESTAMP_SCALER_H_
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -65,4 +65,4 @@ class TimestampScaler {
};
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TIMESTAMP_SCALER_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TIMESTAMP_SCALER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc
index c676094672f..8cbbfa393ac 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/timestamp_scaler_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/timestamp_scaler_unittest.cc
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h"
+#include "webrtc/modules/audio_coding/neteq/timestamp_scaler.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
+#include "webrtc/modules/audio_coding/neteq/mock/mock_decoder_database.h"
+#include "webrtc/modules/audio_coding/neteq/packet.h"
using ::testing::Return;
using ::testing::ReturnNull;
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_checksum.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_checksum.h
new file mode 100644
index 00000000000..ac5682651b5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_checksum.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_CHECKSUM_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_CHECKSUM_H_
+
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/md5digest.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_sink.h"
+#include "webrtc/system_wrappers/interface/compile_assert.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+class AudioChecksum : public AudioSink {
+ public:
+ AudioChecksum() : finished_(false) {}
+
+ virtual bool WriteArray(const int16_t* audio, size_t num_samples) OVERRIDE {
+ if (finished_)
+ return false;
+
+#ifndef WEBRTC_ARCH_LITTLE_ENDIAN
+#error "Big-endian gives a different checksum"
+#endif
+ checksum_.Update(audio, num_samples * sizeof(*audio));
+ return true;
+ }
+
+ // Finalizes the computations, and returns the checksum.
+ std::string Finish() {
+ if (!finished_) {
+ finished_ = true;
+ checksum_.Finish(checksum_result_, rtc::Md5Digest::kSize);
+ }
+ return rtc::hex_encode(checksum_result_, rtc::Md5Digest::kSize);
+ }
+
+ private:
+ rtc::Md5Digest checksum_;
+ char checksum_result_[rtc::Md5Digest::kSize];
+ bool finished_;
+
+ DISALLOW_COPY_AND_ASSIGN(AudioChecksum);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_CHECKSUM_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.cc
index 94ea5bef015..2d2a7e3dd4a 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/tools/audio_loop.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
#include <assert.h>
#include <stdio.h>
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.h
index 038ca370e72..9647d827ac9 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/audio_loop.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_loop.h
@@ -8,12 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_AUDIO_LOOP_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_AUDIO_LOOP_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_LOOP_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_LOOP_H_
#include <string>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
@@ -27,8 +27,7 @@ class AudioLoop {
AudioLoop()
: next_index_(0),
loop_length_samples_(0),
- block_length_samples_(0),
- audio_array_(NULL) {
+ block_length_samples_(0) {
}
virtual ~AudioLoop() {}
@@ -50,11 +49,11 @@ class AudioLoop {
size_t next_index_;
size_t loop_length_samples_;
size_t block_length_samples_;
- scoped_array<int16_t> audio_array_;
+ scoped_ptr<int16_t[]> audio_array_;
DISALLOW_COPY_AND_ASSIGN(AudioLoop);
};
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_AUDIO_LOOP_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_LOOP_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_sink.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_sink.h
new file mode 100644
index 00000000000..5743c3641de
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/audio_sink.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_SINK_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_SINK_H_
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+// Interface class for an object receiving raw output audio from test
+// applications.
+class AudioSink {
+ public:
+ AudioSink() {}
+ virtual ~AudioSink() {}
+
+ // Writes |num_samples| from |audio| to the AudioSink. Returns true if
+ // successful, otherwise false.
+ virtual bool WriteArray(const int16_t* audio, size_t num_samples) = 0;
+
+ // Writes |audio_frame| to the AudioSink. Returns true if successful,
+ // otherwise false.
+ bool WriteAudioFrame(const AudioFrame& audio_frame) {
+ return WriteArray(
+ audio_frame.data_,
+ audio_frame.samples_per_channel_ * audio_frame.num_channels_);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(AudioSink);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_AUDIO_SINK_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.cc
index 62692e27dc4..806317320f8 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h"
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
namespace webrtc {
namespace test {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.h
index de51ff88b8c..274f8ea07e5 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/input_audio_file.h
@@ -8,14 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_INPUT_AUDIO_FILE_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_INPUT_AUDIO_FILE_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_INPUT_AUDIO_FILE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_INPUT_AUDIO_FILE_H_
#include <stdio.h>
#include <string>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -48,4 +48,4 @@ class InputAudioFile {
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_INPUT_AUDIO_FILE_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_INPUT_AUDIO_FILE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
new file mode 100644
index 00000000000..433546fbcbd
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.cc
@@ -0,0 +1,132 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h"
+
+#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_loop.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/typedefs.h"
+
+using webrtc::NetEq;
+using webrtc::test::AudioLoop;
+using webrtc::test::RtpGenerator;
+using webrtc::WebRtcRTPHeader;
+
+namespace webrtc {
+namespace test {
+
+int64_t NetEqPerformanceTest::Run(int runtime_ms,
+ int lossrate,
+ double drift_factor) {
+ const std::string kInputFileName =
+ webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+ const int kSampRateHz = 32000;
+ const webrtc::NetEqDecoder kDecoderType = webrtc::kDecoderPCM16Bswb32kHz;
+ const int kPayloadType = 95;
+
+ // Initialize NetEq instance.
+ NetEq::Config config;
+ config.sample_rate_hz = kSampRateHz;
+ NetEq* neteq = NetEq::Create(config);
+ // Register decoder in |neteq|.
+ if (neteq->RegisterPayloadType(kDecoderType, kPayloadType) != 0)
+ return -1;
+
+ // Set up AudioLoop object.
+ AudioLoop audio_loop;
+ const size_t kMaxLoopLengthSamples = kSampRateHz * 10; // 10 second loop.
+ const size_t kInputBlockSizeSamples = 60 * kSampRateHz / 1000; // 60 ms.
+ if (!audio_loop.Init(kInputFileName, kMaxLoopLengthSamples,
+ kInputBlockSizeSamples))
+ return -1;
+
+ int32_t time_now_ms = 0;
+
+ // Get first input packet.
+ WebRtcRTPHeader rtp_header;
+ RtpGenerator rtp_gen(kSampRateHz / 1000);
+ // Start with positive drift first half of simulation.
+ rtp_gen.set_drift_factor(drift_factor);
+ bool drift_flipped = false;
+ int32_t packet_input_time_ms =
+ rtp_gen.GetRtpHeader(kPayloadType, kInputBlockSizeSamples, &rtp_header);
+ const int16_t* input_samples = audio_loop.GetNextBlock();
+ if (!input_samples) exit(1);
+ uint8_t input_payload[kInputBlockSizeSamples * sizeof(int16_t)];
+ int payload_len = WebRtcPcm16b_Encode(const_cast<int16_t*>(input_samples),
+ kInputBlockSizeSamples,
+ input_payload);
+ assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t));
+
+ // Main loop.
+ webrtc::Clock* clock = webrtc::Clock::GetRealTimeClock();
+ int64_t start_time_ms = clock->TimeInMilliseconds();
+ while (time_now_ms < runtime_ms) {
+ while (packet_input_time_ms <= time_now_ms) {
+ // Drop every N packets, where N = FLAGS_lossrate.
+ bool lost = false;
+ if (lossrate > 0) {
+ lost = ((rtp_header.header.sequenceNumber - 1) % lossrate) == 0;
+ }
+ if (!lost) {
+ // Insert packet.
+ int error = neteq->InsertPacket(
+ rtp_header, input_payload, payload_len,
+ packet_input_time_ms * kSampRateHz / 1000);
+ if (error != NetEq::kOK)
+ return -1;
+ }
+
+ // Get next packet.
+ packet_input_time_ms = rtp_gen.GetRtpHeader(kPayloadType,
+ kInputBlockSizeSamples,
+ &rtp_header);
+ input_samples = audio_loop.GetNextBlock();
+ if (!input_samples) return -1;
+ payload_len = WebRtcPcm16b_Encode(const_cast<int16_t*>(input_samples),
+ kInputBlockSizeSamples,
+ input_payload);
+ assert(payload_len == kInputBlockSizeSamples * sizeof(int16_t));
+ }
+
+ // Get output audio, but don't do anything with it.
+ static const int kMaxChannels = 1;
+ static const int kMaxSamplesPerMs = 48000 / 1000;
+ static const int kOutputBlockSizeMs = 10;
+ static const int kOutDataLen = kOutputBlockSizeMs * kMaxSamplesPerMs *
+ kMaxChannels;
+ int16_t out_data[kOutDataLen];
+ int num_channels;
+ int samples_per_channel;
+ int error = neteq->GetAudio(kOutDataLen, out_data, &samples_per_channel,
+ &num_channels, NULL);
+ if (error != NetEq::kOK)
+ return -1;
+
+ assert(samples_per_channel == kSampRateHz * 10 / 1000);
+
+ time_now_ms += kOutputBlockSizeMs;
+ if (time_now_ms >= runtime_ms / 2 && !drift_flipped) {
+ // Apply negative drift second half of simulation.
+ rtp_gen.set_drift_factor(-drift_factor);
+ drift_flipped = true;
+ }
+ }
+ int64_t end_time_ms = clock->TimeInMilliseconds();
+ delete neteq;
+ return end_time_ms - start_time_ms;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h
new file mode 100644
index 00000000000..d094db0f9b3
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_performance_test.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_PERFORMANCE_TEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_PERFORMANCE_TEST_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+class NetEqPerformanceTest {
+ public:
+ // Runs a performance test with parameters as follows:
+ // |runtime_ms|: the simulation time, i.e., the duration of the audio data.
+ // |lossrate|: drop one out of |lossrate| packets, e.g., one out of 10.
+ // |drift_factor|: clock drift in [0, 1].
+ // Returns the runtime in ms.
+ static int64_t Run(int runtime_ms, int lossrate, double drift_factor);
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_PERFORMANCE_TEST_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
new file mode 100644
index 00000000000..fc5d8abaf82
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.cc
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include "webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h"
+
+namespace webrtc {
+namespace test {
+
+const uint8_t kPayloadType = 95;
+const int kOutputSizeMs = 10;
+
+NetEqQualityTest::NetEqQualityTest(int block_duration_ms,
+ int in_sampling_khz,
+ int out_sampling_khz,
+ enum NetEqDecoder decoder_type,
+ int channels,
+ double drift_factor,
+ std::string in_filename,
+ std::string out_filename)
+ : decoded_time_ms_(0),
+ decodable_time_ms_(0),
+ drift_factor_(drift_factor),
+ block_duration_ms_(block_duration_ms),
+ in_sampling_khz_(in_sampling_khz),
+ out_sampling_khz_(out_sampling_khz),
+ decoder_type_(decoder_type),
+ channels_(channels),
+ in_filename_(in_filename),
+ out_filename_(out_filename),
+ in_size_samples_(in_sampling_khz_ * block_duration_ms_),
+ out_size_samples_(out_sampling_khz_ * kOutputSizeMs),
+ payload_size_bytes_(0),
+ max_payload_bytes_(0),
+ in_file_(new InputAudioFile(in_filename_)),
+ out_file_(NULL),
+ rtp_generator_(new RtpGenerator(in_sampling_khz_, 0, 0,
+ decodable_time_ms_)) {
+ NetEq::Config config;
+ config.sample_rate_hz = out_sampling_khz_ * 1000;
+ neteq_.reset(NetEq::Create(config));
+ max_payload_bytes_ = in_size_samples_ * channels_ * sizeof(int16_t);
+ in_data_.reset(new int16_t[in_size_samples_ * channels_]);
+ payload_.reset(new uint8_t[max_payload_bytes_]);
+ out_data_.reset(new int16_t[out_size_samples_ * channels_]);
+}
+
+void NetEqQualityTest::SetUp() {
+ out_file_ = fopen(out_filename_.c_str(), "wb");
+ ASSERT_TRUE(out_file_ != NULL);
+ ASSERT_EQ(0, neteq_->RegisterPayloadType(decoder_type_, kPayloadType));
+ rtp_generator_->set_drift_factor(drift_factor_);
+}
+
+void NetEqQualityTest::TearDown() {
+ fclose(out_file_);
+}
+
+int NetEqQualityTest::Transmit() {
+ int packet_input_time_ms =
+ rtp_generator_->GetRtpHeader(kPayloadType, in_size_samples_,
+ &rtp_header_);
+ if (!PacketLost(packet_input_time_ms) && payload_size_bytes_ > 0) {
+ int ret = neteq_->InsertPacket(rtp_header_, &payload_[0],
+ payload_size_bytes_,
+ packet_input_time_ms * in_sampling_khz_);
+ if (ret != NetEq::kOK)
+ return -1;
+ }
+ return packet_input_time_ms;
+}
+
+int NetEqQualityTest::DecodeBlock() {
+ int channels;
+ int samples;
+ int ret = neteq_->GetAudio(out_size_samples_ * channels_, &out_data_[0],
+ &samples, &channels, NULL);
+
+ if (ret != NetEq::kOK) {
+ return -1;
+ } else {
+ assert(channels == channels_);
+ assert(samples == kOutputSizeMs * out_sampling_khz_);
+ fwrite(&out_data_[0], sizeof(int16_t), samples * channels, out_file_);
+ return samples;
+ }
+}
+
+void NetEqQualityTest::Simulate(int end_time_ms) {
+ int audio_size_samples;
+
+ while (decoded_time_ms_ < end_time_ms) {
+ while (decodable_time_ms_ - kOutputSizeMs < decoded_time_ms_) {
+ ASSERT_TRUE(in_file_->Read(in_size_samples_ * channels_, &in_data_[0]));
+ payload_size_bytes_ = EncodeBlock(&in_data_[0],
+ in_size_samples_, &payload_[0],
+ max_payload_bytes_);
+ decodable_time_ms_ = Transmit() + block_duration_ms_;
+ }
+ audio_size_samples = DecodeBlock();
+ if (audio_size_samples > 0) {
+ decoded_time_ms_ += audio_size_samples / out_sampling_khz_;
+ }
+ }
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h
new file mode 100644
index 00000000000..87fc50794ca
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_quality_test.h
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_QUALITY_TEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_QUALITY_TEST_H_
+
+#include <string>
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+class NetEqQualityTest : public ::testing::Test {
+ protected:
+ NetEqQualityTest(int block_duration_ms,
+ int in_sampling_khz,
+ int out_sampling_khz,
+ enum NetEqDecoder decoder_type,
+ int channels,
+ double drift_factor,
+ std::string in_filename,
+ std::string out_filename);
+ virtual void SetUp() OVERRIDE;
+ virtual void TearDown() OVERRIDE;
+
+ // EncodeBlock(...) does the following:
+ // 1. encodes a block of audio, saved in |in_data| and has a length of
+ // |block_size_samples| (samples per channel),
+ // 2. save the bit stream to |payload| of |max_bytes| bytes in size,
+ // 3. returns the length of the payload (in bytes),
+ virtual int EncodeBlock(int16_t* in_data, int block_size_samples,
+ uint8_t* payload, int max_bytes) = 0;
+
+ // PacketLoss(...) determines weather a packet sent at an indicated time gets
+ // lost or not.
+ virtual bool PacketLost(int packet_input_time_ms) { return false; }
+
+ // DecodeBlock() decodes a block of audio using the payload stored in
+ // |payload_| with the length of |payload_size_bytes_| (bytes). The decoded
+ // audio is to be stored in |out_data_|.
+ int DecodeBlock();
+
+ // Transmit() uses |rtp_generator_| to generate a packet and passes it to
+ // |neteq_|.
+ int Transmit();
+
+ // Simulate(...) runs encoding / transmitting / decoding up to |end_time_ms|
+ // (miliseconds), the resulted audio is stored in the file with the name of
+ // |out_filename_|.
+ void Simulate(int end_time_ms);
+
+ private:
+ int decoded_time_ms_;
+ int decodable_time_ms_;
+ double drift_factor_;
+ const int block_duration_ms_;
+ const int in_sampling_khz_;
+ const int out_sampling_khz_;
+ const enum NetEqDecoder decoder_type_;
+ const int channels_;
+ const std::string in_filename_;
+ const std::string out_filename_;
+
+ // Number of samples per channel in a frame.
+ const int in_size_samples_;
+
+ // Expected output number of samples per channel in a frame.
+ const int out_size_samples_;
+
+ int payload_size_bytes_;
+ int max_payload_bytes_;
+
+ scoped_ptr<InputAudioFile> in_file_;
+ FILE* out_file_;
+
+ scoped_ptr<RtpGenerator> rtp_generator_;
+ scoped_ptr<NetEq> neteq_;
+
+ scoped_ptr<int16_t[]> in_data_;
+ scoped_ptr<uint8_t[]> payload_;
+ scoped_ptr<int16_t[]> out_data_;
+ WebRtcRTPHeader rtp_header_;
+};
+
+} // namespace test
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_NETEQ_QUALITY_TEST_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/neteq_rtpplay.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
index f0ca51f2f70..3c5f6b0bbaa 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/neteq_rtpplay.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/neteq_rtpplay.cc
@@ -8,6 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+// TODO(hlundin): The functionality in this file should be moved into one or
+// several classes.
+
#include <assert.h>
#include <stdio.h>
@@ -16,10 +19,13 @@
#include <string>
#include "google/gflags.h"
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h"
-#include "webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.h"
+#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "webrtc/modules/audio_coding/neteq/interface/neteq.h"
+#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h"
+#include "webrtc/modules/audio_coding/neteq/tools/input_audio_file.h"
#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/typedefs.h"
@@ -88,11 +94,23 @@ DEFINE_bool(codec_map, false, "Prints the mapping between RTP payload type and "
"codec");
DEFINE_bool(dummy_rtp, false, "The input file contains ""dummy"" RTP data, "
"i.e., only headers");
+DEFINE_string(replacement_audio_file, "",
+ "A PCM file that will be used to populate ""dummy"" RTP packets");
// Declaring helper functions (defined further down in this file).
std::string CodecName(webrtc::NetEqDecoder codec);
void RegisterPayloadTypes(NetEq* neteq);
void PrintCodecMapping();
+size_t ReplacePayload(webrtc::test::InputAudioFile* replacement_audio_file,
+ webrtc::scoped_ptr<int16_t[]>* replacement_audio,
+ webrtc::scoped_ptr<uint8_t[]>* payload,
+ size_t* payload_mem_size_bytes,
+ size_t* frame_size_samples,
+ WebRtcRTPHeader* rtp_header,
+ NETEQTEST_RTPpacket* next_rtp);
+int CodecSampleRate(uint8_t payload_type);
+int CodecTimestampRate(uint8_t payload_type);
+bool IsComfortNosie(uint8_t payload_type);
int main(int argc, char* argv[]) {
static const int kMaxChannels = 5;
@@ -135,6 +153,15 @@ int main(int argc, char* argv[]) {
}
std::cout << "Output file: " << argv[2] << std::endl;
+ // Check if a replacement audio file was provided, and if so, open it.
+ bool replace_payload = false;
+ webrtc::scoped_ptr<webrtc::test::InputAudioFile> replacement_audio_file;
+ if (!FLAGS_replacement_audio_file.empty()) {
+ replacement_audio_file.reset(
+ new webrtc::test::InputAudioFile(FLAGS_replacement_audio_file));
+ replace_payload = true;
+ }
+
// Read RTP file header.
if (NETEQTEST_RTPpacket::skipFileHeader(in_file) != 0) {
std::cerr << "Wrong format in RTP file" << std::endl;
@@ -149,21 +176,47 @@ int main(int argc, char* argv[]) {
// Initialize NetEq instance.
int sample_rate_hz = 16000;
- NetEq* neteq = NetEq::Create(sample_rate_hz);
+ NetEq::Config config;
+ config.sample_rate_hz = sample_rate_hz;
+ NetEq* neteq = NetEq::Create(config);
RegisterPayloadTypes(neteq);
// Read first packet.
- NETEQTEST_RTPpacket *rtp;
+ NETEQTEST_RTPpacket* rtp;
+ NETEQTEST_RTPpacket* next_rtp = NULL;
if (!FLAGS_dummy_rtp) {
rtp = new NETEQTEST_RTPpacket();
+ if (replace_payload) {
+ next_rtp = new NETEQTEST_RTPpacket();
+ }
} else {
rtp = new NETEQTEST_DummyRTPpacket();
+ if (replace_payload) {
+ next_rtp = new NETEQTEST_DummyRTPpacket();
+ }
}
rtp->readFromFile(in_file);
- if (!rtp) {
+ if (rtp->dataLen() < 0) {
std::cout << "Warning: RTP file is empty" << std::endl;
}
+ // Set up variables for audio replacement if needed.
+ size_t input_frame_size_timestamps = 0;
+ webrtc::scoped_ptr<int16_t[]> replacement_audio;
+ webrtc::scoped_ptr<uint8_t[]> payload;
+ size_t payload_mem_size_bytes = 0;
+ if (replace_payload) {
+ // Initially assume that the frame size is 30 ms at the initial sample rate.
+ // This value will be replaced with the correct one as soon as two
+ // consecutive packets are found.
+ input_frame_size_timestamps = 30 * sample_rate_hz / 1000;
+ replacement_audio.reset(new int16_t[input_frame_size_timestamps]);
+ payload_mem_size_bytes = 2 * input_frame_size_timestamps;
+ payload.reset(new uint8_t[payload_mem_size_bytes]);
+ assert(next_rtp);
+ next_rtp->readFromFile(in_file);
+ }
+
// This is the main simulation loop.
int time_now_ms = rtp->time(); // Start immediately with the first packet.
int next_input_time_ms = rtp->time();
@@ -181,9 +234,21 @@ int main(int argc, char* argv[]) {
// Parse RTP header.
WebRtcRTPHeader rtp_header;
rtp->parseHeader(&rtp_header);
- int error = neteq->InsertPacket(rtp_header, rtp->payload(),
- rtp->payloadLen(),
- rtp->time() * sample_rate_hz / 1000);
+ uint8_t* payload_ptr = rtp->payload();
+ size_t payload_len = rtp->payloadLen();
+ if (replace_payload) {
+ payload_len = ReplacePayload(replacement_audio_file.get(),
+ &replacement_audio,
+ &payload,
+ &payload_mem_size_bytes,
+ &input_frame_size_timestamps,
+ &rtp_header,
+ next_rtp);
+ payload_ptr = payload.get();
+ }
+ int error = neteq->InsertPacket(rtp_header, payload_ptr,
+ static_cast<int>(payload_len),
+ rtp->time() * sample_rate_hz / 1000);
if (error != NetEq::kOK) {
std::cerr << "InsertPacket returned error code " <<
neteq->LastError() << std::endl;
@@ -191,6 +256,13 @@ int main(int argc, char* argv[]) {
}
// Get next packet from file.
rtp->readFromFile(in_file);
+ if (replace_payload) {
+ // At this point |rtp| contains the packet *after* |next_rtp|.
+ // Swap RTP packet objects between |rtp| and |next_rtp|.
+ NETEQTEST_RTPpacket* temp_rtp = rtp;
+ rtp = next_rtp;
+ next_rtp = temp_rtp;
+ }
next_input_time_ms = rtp->time();
}
@@ -212,6 +284,7 @@ int main(int argc, char* argv[]) {
}
// Write to file.
+ // TODO(hlundin): Make writing to file optional.
size_t write_len = samples_per_channel * num_channels;
if (fwrite(out_data, sizeof(out_data[0]), write_len, out_file) !=
write_len) {
@@ -229,6 +302,8 @@ int main(int argc, char* argv[]) {
fclose(in_file);
fclose(out_file);
+ delete rtp;
+ delete next_rtp;
delete neteq;
webrtc::Trace::ReturnTrace();
return 0;
@@ -421,3 +496,133 @@ void PrintCodecMapping() {
std::cout << CodecName(webrtc::kDecoderCNGswb48kHz).c_str() << ": " <<
FLAGS_cn_swb48 << std::endl;
}
+
+size_t ReplacePayload(webrtc::test::InputAudioFile* replacement_audio_file,
+ webrtc::scoped_ptr<int16_t[]>* replacement_audio,
+ webrtc::scoped_ptr<uint8_t[]>* payload,
+ size_t* payload_mem_size_bytes,
+ size_t* frame_size_samples,
+ WebRtcRTPHeader* rtp_header,
+ NETEQTEST_RTPpacket* next_rtp) {
+ size_t payload_len = 0;
+ // Check for CNG.
+ if (IsComfortNosie(rtp_header->header.payloadType)) {
+ // If CNG, simply insert a zero-energy one-byte payload.
+ if (*payload_mem_size_bytes < 1) {
+ (*payload).reset(new uint8_t[1]);
+ *payload_mem_size_bytes = 1;
+ }
+ (*payload)[0] = 127; // Max attenuation of CNG.
+ payload_len = 1;
+ } else {
+ if (next_rtp->payloadLen() > 0) {
+ // Check if payload length has changed.
+ if (next_rtp->sequenceNumber() == rtp_header->header.sequenceNumber + 1) {
+ if (*frame_size_samples !=
+ next_rtp->timeStamp() - rtp_header->header.timestamp) {
+ *frame_size_samples =
+ next_rtp->timeStamp() - rtp_header->header.timestamp;
+ (*replacement_audio).reset(
+ new int16_t[*frame_size_samples]);
+ *payload_mem_size_bytes = 2 * *frame_size_samples;
+ (*payload).reset(new uint8_t[*payload_mem_size_bytes]);
+ }
+ }
+ }
+ // Get new speech.
+ assert((*replacement_audio).get());
+ if (CodecTimestampRate(rtp_header->header.payloadType) !=
+ CodecSampleRate(rtp_header->header.payloadType) ||
+ rtp_header->header.payloadType == FLAGS_red ||
+ rtp_header->header.payloadType == FLAGS_avt) {
+ // Some codecs have different sample and timestamp rates. And neither
+ // RED nor DTMF is supported for replacement.
+ std::cerr << "Codec not supported for audio replacement." <<
+ std::endl;
+ webrtc::Trace::ReturnTrace();
+ exit(1);
+ }
+ assert(*frame_size_samples > 0);
+ if (!replacement_audio_file->Read(*frame_size_samples,
+ (*replacement_audio).get())) {
+      std::cerr << "Could not read replacement audio file." << std::endl;
+ webrtc::Trace::ReturnTrace();
+ exit(1);
+ }
+ // Encode it as PCM16.
+ assert((*payload).get());
+ payload_len = WebRtcPcm16b_Encode((*replacement_audio).get(),
+ static_cast<int16_t>(*frame_size_samples),
+ (*payload).get());
+ assert(payload_len == 2 * *frame_size_samples);
+ // Change payload type to PCM16.
+ switch (CodecSampleRate(rtp_header->header.payloadType)) {
+ case 8000:
+ rtp_header->header.payloadType = FLAGS_pcm16b;
+ break;
+ case 16000:
+ rtp_header->header.payloadType = FLAGS_pcm16b_wb;
+ break;
+ case 32000:
+ rtp_header->header.payloadType = FLAGS_pcm16b_swb32;
+ break;
+ case 48000:
+ rtp_header->header.payloadType = FLAGS_pcm16b_swb48;
+ break;
+ default:
+ std::cerr << "Payload type " <<
+ static_cast<int>(rtp_header->header.payloadType) <<
+ " not supported or unknown." << std::endl;
+ webrtc::Trace::ReturnTrace();
+ exit(1);
+ assert(false);
+ }
+ }
+ return payload_len;
+}
+
+int CodecSampleRate(uint8_t payload_type) {
+ if (payload_type == FLAGS_pcmu ||
+ payload_type == FLAGS_pcma ||
+ payload_type == FLAGS_ilbc ||
+ payload_type == FLAGS_pcm16b ||
+ payload_type == FLAGS_cn_nb) {
+ return 8000;
+ } else if (payload_type == FLAGS_isac ||
+ payload_type == FLAGS_pcm16b_wb ||
+ payload_type == FLAGS_g722 ||
+ payload_type == FLAGS_cn_wb) {
+ return 16000;
+ } else if (payload_type == FLAGS_isac_swb ||
+ payload_type == FLAGS_pcm16b_swb32 ||
+ payload_type == FLAGS_cn_swb32) {
+ return 32000;
+ } else if (payload_type == FLAGS_pcm16b_swb48 ||
+ payload_type == FLAGS_cn_swb48) {
+ return 48000;
+ } else if (payload_type == FLAGS_avt ||
+ payload_type == FLAGS_red) {
+ return 0;
+ } else {
+ return -1;
+ }
+}
+
+int CodecTimestampRate(uint8_t payload_type) {
+ if (payload_type == FLAGS_g722) {
+ return 8000;
+ } else {
+ return CodecSampleRate(payload_type);
+ }
+}
+
+bool IsComfortNosie(uint8_t payload_type) {
+ if (payload_type == FLAGS_cn_nb ||
+ payload_type == FLAGS_cn_wb ||
+ payload_type == FLAGS_cn_swb32 ||
+ payload_type == FLAGS_cn_swb48) {
+ return true;
+ } else {
+ return false;
+ }
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/output_audio_file.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/output_audio_file.h
new file mode 100644
index 00000000000..1d6128076e1
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/output_audio_file.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_OUTPUT_AUDIO_FILE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_OUTPUT_AUDIO_FILE_H_
+
+#include <assert.h>
+#include <stdio.h>
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/audio_coding/neteq/tools/audio_sink.h"
+
+namespace webrtc {
+namespace test {
+
+class OutputAudioFile : public AudioSink {
+ public:
+ // Creates an OutputAudioFile, opening a file named |file_name| for writing.
+ // The file format is 16-bit signed host-endian PCM.
+ explicit OutputAudioFile(const std::string& file_name) {
+ out_file_ = fopen(file_name.c_str(), "wb");
+ }
+
+ virtual ~OutputAudioFile() {
+ if (out_file_)
+ fclose(out_file_);
+ }
+
+ virtual bool WriteArray(const int16_t* audio, size_t num_samples) OVERRIDE {
+ assert(out_file_);
+ return fwrite(audio, sizeof(*audio), num_samples, out_file_) == num_samples;
+ }
+
+ private:
+ FILE* out_file_;
+
+ DISALLOW_COPY_AND_ASSIGN(OutputAudioFile);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_OUTPUT_AUDIO_FILE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.cc
new file mode 100644
index 00000000000..d8fb7134f10
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.cc
@@ -0,0 +1,155 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+
+namespace webrtc {
+namespace test {
+
+Packet::Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ double time_ms,
+ const RtpHeaderParser& parser)
+ : payload_memory_(packet_memory),
+ payload_(NULL),
+ packet_length_bytes_(allocated_bytes),
+ payload_length_bytes_(0),
+ virtual_packet_length_bytes_(allocated_bytes),
+ virtual_payload_length_bytes_(0),
+ time_ms_(time_ms) {
+ valid_header_ = ParseHeader(parser);
+}
+
+Packet::Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ size_t virtual_packet_length_bytes,
+ double time_ms,
+ const RtpHeaderParser& parser)
+ : payload_memory_(packet_memory),
+ payload_(NULL),
+ packet_length_bytes_(allocated_bytes),
+ payload_length_bytes_(0),
+ virtual_packet_length_bytes_(virtual_packet_length_bytes),
+ virtual_payload_length_bytes_(0),
+ time_ms_(time_ms) {
+ valid_header_ = ParseHeader(parser);
+}
+
+Packet::Packet(uint8_t* packet_memory, size_t allocated_bytes, double time_ms)
+ : payload_memory_(packet_memory),
+ payload_(NULL),
+ packet_length_bytes_(allocated_bytes),
+ payload_length_bytes_(0),
+ virtual_packet_length_bytes_(allocated_bytes),
+ virtual_payload_length_bytes_(0),
+ time_ms_(time_ms) {
+ scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ valid_header_ = ParseHeader(*parser);
+}
+
+Packet::Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ size_t virtual_packet_length_bytes,
+ double time_ms)
+ : payload_memory_(packet_memory),
+ payload_(NULL),
+ packet_length_bytes_(allocated_bytes),
+ payload_length_bytes_(0),
+ virtual_packet_length_bytes_(virtual_packet_length_bytes),
+ virtual_payload_length_bytes_(0),
+ time_ms_(time_ms) {
+ scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ valid_header_ = ParseHeader(*parser);
+}
+
+bool Packet::ExtractRedHeaders(std::list<RTPHeader*>* headers) const {
+ //
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // |1| block PT | timestamp offset | block length |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // |1| ... |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // |0| block PT |
+ // +-+-+-+-+-+-+-+-+
+ //
+
+ assert(payload_);
+ const uint8_t* payload_ptr = payload_;
+ const uint8_t* payload_end_ptr = payload_ptr + payload_length_bytes_;
+
+ // Find all RED headers with the extension bit set to 1. That is, all headers
+ // but the last one.
+ while ((payload_ptr < payload_end_ptr) && (*payload_ptr & 0x80)) {
+ RTPHeader* header = new RTPHeader;
+ CopyToHeader(header);
+ header->payloadType = payload_ptr[0] & 0x7F;
+ uint32_t offset = (payload_ptr[1] << 6) + ((payload_ptr[2] & 0xFC) >> 2);
+ header->timestamp -= offset;
+ headers->push_front(header);
+ payload_ptr += 4;
+ }
+ // Last header.
+ assert(payload_ptr < payload_end_ptr);
+ if (payload_ptr >= payload_end_ptr) {
+ return false; // Payload too short.
+ }
+ RTPHeader* header = new RTPHeader;
+ CopyToHeader(header);
+ header->payloadType = payload_ptr[0] & 0x7F;
+ headers->push_front(header);
+ return true;
+}
+
+void Packet::DeleteRedHeaders(std::list<RTPHeader*>* headers) {
+ while (!headers->empty()) {
+ delete headers->front();
+ headers->pop_front();
+ }
+}
+
+bool Packet::ParseHeader(const RtpHeaderParser& parser) {
+ bool valid_header = parser.Parse(
+ payload_memory_.get(), static_cast<int>(packet_length_bytes_), &header_);
+ assert(valid_header);
+ if (!valid_header) {
+ return false;
+ }
+ assert(header_.headerLength <= packet_length_bytes_);
+ payload_ = &payload_memory_[header_.headerLength];
+ assert(packet_length_bytes_ >= header_.headerLength);
+ payload_length_bytes_ = packet_length_bytes_ - header_.headerLength;
+ assert(virtual_packet_length_bytes_ >= header_.headerLength);
+ virtual_payload_length_bytes_ =
+ virtual_packet_length_bytes_ - header_.headerLength;
+ return true;
+}
+
+void Packet::CopyToHeader(RTPHeader* destination) const {
+ destination->markerBit = header_.markerBit;
+ destination->payloadType = header_.payloadType;
+ destination->sequenceNumber = header_.sequenceNumber;
+ destination->timestamp = header_.timestamp;
+ destination->ssrc = header_.ssrc;
+ destination->numCSRCs = header_.numCSRCs;
+ destination->paddingLength = header_.paddingLength;
+ destination->headerLength = header_.headerLength;
+ destination->payload_type_frequency = header_.payload_type_frequency;
+ memcpy(&destination->arrOfCSRCs,
+ &header_.arrOfCSRCs,
+ sizeof(header_.arrOfCSRCs));
+ memcpy(
+ &destination->extension, &header_.extension, sizeof(header_.extension));
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.h
new file mode 100644
index 00000000000..eb8ce28a227
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_H_
+
+#include <list>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_types.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class RtpHeaderParser;
+
+namespace test {
+
+// Class for handling RTP packets in test applications.
+class Packet {
+ public:
+ // Creates a packet, with the packet payload (including header bytes) in
+ // |packet_memory|. The length of |packet_memory| is |allocated_bytes|.
+ // The new object assumes ownership of |packet_memory| and will delete it
+ // when the Packet object is deleted. The |time_ms| is an extra time
+ // associated with this packet, typically used to denote arrival time.
+ // The first bytes in |packet_memory| will be parsed using |parser|.
+ Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ double time_ms,
+ const RtpHeaderParser& parser);
+
+ // Same as above, but with the extra argument |virtual_packet_length_bytes|.
+ // This is typically used when reading RTP dump files that only contain the
+ // RTP headers, and no payload (a.k.a RTP dummy files or RTP light). The
+ // |virtual_packet_length_bytes| tells what size the packet had on wire,
+ // including the now discarded payload, whereas |allocated_bytes| is the
+ // length of the remaining payload (typically only the RTP header).
+ Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ size_t virtual_packet_length_bytes,
+ double time_ms,
+ const RtpHeaderParser& parser);
+
+ // The following two constructors are the same as above, but without a
+ // parser. Note that when the object is constructed using any of these
+ // methods, the header will be parsed using a default RtpHeaderParser object.
+ // In particular, RTP header extensions won't be parsed.
+ Packet(uint8_t* packet_memory, size_t allocated_bytes, double time_ms);
+
+ Packet(uint8_t* packet_memory,
+ size_t allocated_bytes,
+ size_t virtual_packet_length_bytes,
+ double time_ms);
+
+ virtual ~Packet() {}
+
+ // Parses the first bytes of the RTP payload, interpreting them as RED headers
+ // according to RFC 2198. The headers will be inserted into |headers|. The
+ // caller of the method assumes ownership of the objects in the list, and
+ // must delete them properly.
+ bool ExtractRedHeaders(std::list<RTPHeader*>* headers) const;
+
+ // Deletes all RTPHeader objects in |headers|, but does not delete |headers|
+ // itself.
+ static void DeleteRedHeaders(std::list<RTPHeader*>* headers);
+
+ const uint8_t* payload() const { return payload_; }
+
+ size_t packet_length_bytes() const { return packet_length_bytes_; }
+
+ size_t payload_length_bytes() const { return payload_length_bytes_; }
+
+ size_t virtual_packet_length_bytes() const {
+ return virtual_packet_length_bytes_;
+ }
+
+ size_t virtual_payload_length_bytes() const {
+ return virtual_payload_length_bytes_;
+ }
+
+ const RTPHeader& header() const { return header_; }
+
+ void set_time_ms(double time) { time_ms_ = time; }
+ double time_ms() const { return time_ms_; }
+ bool valid_header() const { return valid_header_; }
+
+ private:
+ bool ParseHeader(const RtpHeaderParser& parser);
+ void CopyToHeader(RTPHeader* destination) const;
+
+ RTPHeader header_;
+ scoped_ptr<uint8_t[]> payload_memory_;
+ const uint8_t* payload_; // First byte after header.
+ const size_t packet_length_bytes_; // Total length of packet.
+ size_t payload_length_bytes_; // Length of the payload, after RTP header.
+ // Zero for dummy RTP packets.
+ // Virtual lengths are used when parsing RTP header files (dummy RTP files).
+ const size_t virtual_packet_length_bytes_;
+ size_t virtual_payload_length_bytes_;
+ double time_ms_; // Used to denote a packet's arrival time.
+ bool valid_header_; // Set by the RtpHeaderParser.
+
+ DISALLOW_COPY_AND_ASSIGN(Packet);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_source.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_source.h
new file mode 100644
index 00000000000..669bc14e461
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_source.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_SOURCE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_SOURCE_H_
+
+#include "webrtc/base/constructormagic.h"
+
+namespace webrtc {
+namespace test {
+
+class Packet;
+
+// Interface class for an object delivering RTP packets to test applications.
+class PacketSource {
+ public:
+ PacketSource() {}
+ virtual ~PacketSource() {}
+
+ // Returns a pointer to the next packet. Returns NULL if the source is
+ // depleted, or if an error occurred.
+ virtual Packet* NextPacket() = 0;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(PacketSource);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_PACKET_SOURCE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_unittest.cc
new file mode 100644
index 00000000000..df844ee84f7
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/packet_unittest.cc
@@ -0,0 +1,202 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for test Packet class.
+
+#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
+
+#include "gtest/gtest.h"
+
+namespace webrtc {
+namespace test {
+
+namespace {
+const int kHeaderLengthBytes = 12;
+
+void MakeRtpHeader(int payload_type,
+ int seq_number,
+ uint32_t timestamp,
+ uint32_t ssrc,
+ uint8_t* rtp_data) {
+ rtp_data[0] = 0x80;
+ rtp_data[1] = payload_type & 0xFF;
+ rtp_data[2] = (seq_number >> 8) & 0xFF;
+ rtp_data[3] = (seq_number) & 0xFF;
+ rtp_data[4] = (timestamp >> 24) & 0xFF;
+ rtp_data[5] = (timestamp >> 16) & 0xFF;
+ rtp_data[6] = (timestamp >> 8) & 0xFF;
+ rtp_data[7] = timestamp & 0xFF;
+ rtp_data[8] = (ssrc >> 24) & 0xFF;
+ rtp_data[9] = (ssrc >> 16) & 0xFF;
+ rtp_data[10] = (ssrc >> 8) & 0xFF;
+ rtp_data[11] = ssrc & 0xFF;
+}
+} // namespace
+
+TEST(TestPacket, RegularPacket) {
+ const size_t kPacketLengthBytes = 100;
+ uint8_t* packet_memory = new uint8_t[kPacketLengthBytes];
+ const uint8_t kPayloadType = 17;
+ const uint16_t kSequenceNumber = 4711;
+ const uint32_t kTimestamp = 47114711;
+ const uint32_t kSsrc = 0x12345678;
+ MakeRtpHeader(
+ kPayloadType, kSequenceNumber, kTimestamp, kSsrc, packet_memory);
+ const double kPacketTime = 1.0;
+ // Hand over ownership of |packet_memory| to |packet|.
+ Packet packet(packet_memory, kPacketLengthBytes, kPacketTime);
+ ASSERT_TRUE(packet.valid_header());
+ EXPECT_EQ(kPayloadType, packet.header().payloadType);
+ EXPECT_EQ(kSequenceNumber, packet.header().sequenceNumber);
+ EXPECT_EQ(kTimestamp, packet.header().timestamp);
+ EXPECT_EQ(kSsrc, packet.header().ssrc);
+ EXPECT_EQ(0, packet.header().numCSRCs);
+ EXPECT_EQ(kPacketLengthBytes, packet.packet_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes - kHeaderLengthBytes,
+ packet.payload_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes, packet.virtual_packet_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes - kHeaderLengthBytes,
+ packet.virtual_payload_length_bytes());
+ EXPECT_EQ(kPacketTime, packet.time_ms());
+}
+
+TEST(TestPacket, DummyPacket) {
+ const size_t kPacketLengthBytes = kHeaderLengthBytes; // Only RTP header.
+ const size_t kVirtualPacketLengthBytes = 100;
+ uint8_t* packet_memory = new uint8_t[kPacketLengthBytes];
+ const uint8_t kPayloadType = 17;
+ const uint16_t kSequenceNumber = 4711;
+ const uint32_t kTimestamp = 47114711;
+ const uint32_t kSsrc = 0x12345678;
+ MakeRtpHeader(
+ kPayloadType, kSequenceNumber, kTimestamp, kSsrc, packet_memory);
+ const double kPacketTime = 1.0;
+ // Hand over ownership of |packet_memory| to |packet|.
+ Packet packet(packet_memory,
+ kPacketLengthBytes,
+ kVirtualPacketLengthBytes,
+ kPacketTime);
+ ASSERT_TRUE(packet.valid_header());
+ EXPECT_EQ(kPayloadType, packet.header().payloadType);
+ EXPECT_EQ(kSequenceNumber, packet.header().sequenceNumber);
+ EXPECT_EQ(kTimestamp, packet.header().timestamp);
+ EXPECT_EQ(kSsrc, packet.header().ssrc);
+ EXPECT_EQ(0, packet.header().numCSRCs);
+ EXPECT_EQ(kPacketLengthBytes, packet.packet_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes - kHeaderLengthBytes,
+ packet.payload_length_bytes());
+ EXPECT_EQ(kVirtualPacketLengthBytes, packet.virtual_packet_length_bytes());
+ EXPECT_EQ(kVirtualPacketLengthBytes - kHeaderLengthBytes,
+ packet.virtual_payload_length_bytes());
+ EXPECT_EQ(kPacketTime, packet.time_ms());
+}
+
+namespace {
+// Writes one RED block header starting at |rtp_data|, according to RFC 2198.
+// returns the number of bytes written (1 or 4).
+//
+// Format if |last_payload| is false:
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |1| block PT | timestamp offset | block length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// Format if |last_payload| is true:
+// 0 1 2 3 4 5 6 7
+// +-+-+-+-+-+-+-+-+
+// |0| Block PT |
+// +-+-+-+-+-+-+-+-+
+
+int MakeRedHeader(int payload_type,
+ uint32_t timestamp_offset,
+ int block_length,
+ bool last_payload,
+ uint8_t* rtp_data) {
+ rtp_data[0] = 0x80 | (payload_type & 0x7F); // Set the first bit to 1.
+ if (last_payload) {
+    rtp_data[0] &= 0x7F;  // Reset the first bit to 0 to indicate last block.
+ return 1;
+ }
+ rtp_data[1] = timestamp_offset >> 6;
+ rtp_data[2] = (timestamp_offset & 0x3F) << 2;
+ rtp_data[2] |= block_length >> 8;
+ rtp_data[3] = block_length & 0xFF;
+ return 4;
+}
+} // namespace
+
+TEST(TestPacket, RED) {
+ const size_t kPacketLengthBytes = 100;
+ uint8_t* packet_memory = new uint8_t[kPacketLengthBytes];
+ const uint8_t kRedPayloadType = 17;
+ const uint16_t kSequenceNumber = 4711;
+ const uint32_t kTimestamp = 47114711;
+ const uint32_t kSsrc = 0x12345678;
+ MakeRtpHeader(
+ kRedPayloadType, kSequenceNumber, kTimestamp, kSsrc, packet_memory);
+ // Create four RED headers.
+  // Payload types are just the same as the block index; the offset is 100
+  // times the block index.
+ const int kRedBlocks = 4;
+ uint8_t* payload_ptr =
+ &packet_memory[kHeaderLengthBytes]; // First byte after header.
+ for (int i = 0; i < kRedBlocks; ++i) {
+ int payload_type = i;
+ // Offset value is not used for the last block.
+ uint32_t timestamp_offset = 100 * i;
+ int block_length = 10 * i;
+ bool last_block = (i == kRedBlocks - 1) ? true : false;
+ payload_ptr += MakeRedHeader(
+ payload_type, timestamp_offset, block_length, last_block, payload_ptr);
+ }
+ const double kPacketTime = 1.0;
+ // Hand over ownership of |packet_memory| to |packet|.
+ Packet packet(packet_memory, kPacketLengthBytes, kPacketTime);
+ ASSERT_TRUE(packet.valid_header());
+ EXPECT_EQ(kRedPayloadType, packet.header().payloadType);
+ EXPECT_EQ(kSequenceNumber, packet.header().sequenceNumber);
+ EXPECT_EQ(kTimestamp, packet.header().timestamp);
+ EXPECT_EQ(kSsrc, packet.header().ssrc);
+ EXPECT_EQ(0, packet.header().numCSRCs);
+ EXPECT_EQ(kPacketLengthBytes, packet.packet_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes - kHeaderLengthBytes,
+ packet.payload_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes, packet.virtual_packet_length_bytes());
+ EXPECT_EQ(kPacketLengthBytes - kHeaderLengthBytes,
+ packet.virtual_payload_length_bytes());
+ EXPECT_EQ(kPacketTime, packet.time_ms());
+ std::list<RTPHeader*> red_headers;
+ EXPECT_TRUE(packet.ExtractRedHeaders(&red_headers));
+ EXPECT_EQ(kRedBlocks, static_cast<int>(red_headers.size()));
+ int block_index = 0;
+ for (std::list<RTPHeader*>::reverse_iterator it = red_headers.rbegin();
+ it != red_headers.rend();
+ ++it) {
+ // Reading list from the back, since the extraction puts the main payload
+ // (which is the last one on wire) first.
+ RTPHeader* red_block = *it;
+ EXPECT_EQ(block_index, red_block->payloadType);
+ EXPECT_EQ(kSequenceNumber, red_block->sequenceNumber);
+ if (block_index == kRedBlocks - 1) {
+ // Last block has zero offset per definition.
+ EXPECT_EQ(kTimestamp, red_block->timestamp);
+ } else {
+ EXPECT_EQ(kTimestamp - 100 * block_index, red_block->timestamp);
+ }
+ EXPECT_EQ(kSsrc, red_block->ssrc);
+ EXPECT_EQ(0, red_block->numCSRCs);
+ ++block_index;
+ }
+ Packet::DeleteRedHeaders(&red_headers);
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_analyze.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_analyze.cc
new file mode 100644
index 00000000000..773cc2c8962
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_analyze.cc
@@ -0,0 +1,147 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <stdio.h>
+#include <vector>
+
+#include "google/gflags.h"
+#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+// Flag validator.
+static bool ValidatePayloadType(const char* flagname, int32_t value) {
+ if (value >= 0 && value <= 127) // Value is ok.
+ return true;
+ printf("Invalid value for --%s: %d\n", flagname, static_cast<int>(value));
+ return false;
+}
+static bool ValidateExtensionId(const char* flagname, int32_t value) {
+ if (value > 0 && value <= 255) // Value is ok.
+ return true;
+ printf("Invalid value for --%s: %d\n", flagname, static_cast<int>(value));
+ return false;
+}
+
+// Define command line flags.
+DEFINE_int32(red, 117, "RTP payload type for RED");
+static const bool red_dummy =
+ google::RegisterFlagValidator(&FLAGS_red, &ValidatePayloadType);
+DEFINE_int32(audio_level, 1, "Extension ID for audio level (RFC 6464)");
+static const bool audio_level_dummy =
+ google::RegisterFlagValidator(&FLAGS_audio_level, &ValidateExtensionId);
+
+int main(int argc, char* argv[]) {
+ std::string program_name = argv[0];
+ std::string usage =
+ "Tool for parsing an RTP dump file to text output.\n"
+ "Run " +
+ program_name +
+ " --helpshort for usage.\n"
+ "Example usage:\n" +
+ program_name + " input.rtp output.txt\n\n" +
+      "Output is sent to stdout if no output file is given. " +
+      "Note that this tool can read files with or without payloads.";
+ google::SetUsageMessage(usage);
+ google::ParseCommandLineFlags(&argc, &argv, true);
+
+ if (argc != 2 && argc != 3) {
+ // Print usage information.
+ printf("%s", google::ProgramUsage());
+ return 0;
+ }
+
+ FILE* in_file = fopen(argv[1], "rb");
+ if (!in_file) {
+ printf("Cannot open input file %s\n", argv[1]);
+ return -1;
+ }
+ printf("Input file: %s\n", argv[1]);
+ webrtc::scoped_ptr<webrtc::test::RtpFileSource> file_source(
+ webrtc::test::RtpFileSource::Create(argv[1]));
+ assert(file_source.get());
+ // Set RTP extension ID.
+ bool print_audio_level = false;
+ if (!google::GetCommandLineFlagInfoOrDie("audio_level").is_default) {
+ print_audio_level = true;
+ file_source->RegisterRtpHeaderExtension(webrtc::kRtpExtensionAudioLevel,
+ FLAGS_audio_level);
+ }
+
+ FILE* out_file;
+ if (argc == 3) {
+ out_file = fopen(argv[2], "wt");
+ if (!out_file) {
+ printf("Cannot open output file %s\n", argv[2]);
+ return -1;
+ }
+ printf("Output file: %s\n\n", argv[2]);
+ } else {
+ out_file = stdout;
+ }
+
+ // Print file header.
+ fprintf(out_file, "SeqNo TimeStamp SendTime Size PT M SSRC");
+ if (print_audio_level) {
+ fprintf(out_file, " AuLvl (V)");
+ }
+ fprintf(out_file, "\n");
+
+ webrtc::scoped_ptr<webrtc::test::Packet> packet;
+ while (!file_source->EndOfFile()) {
+ packet.reset(file_source->NextPacket());
+ if (!packet.get()) {
+ // This is probably an RTCP packet. Move on to the next one.
+ continue;
+ }
+ assert(packet.get());
+ // Write packet data to file.
+ fprintf(out_file,
+ "%5u %10u %10u %5i %5i %2i %#08X",
+ packet->header().sequenceNumber,
+ packet->header().timestamp,
+ static_cast<unsigned int>(packet->time_ms()),
+ static_cast<int>(packet->packet_length_bytes()),
+ packet->header().payloadType,
+ packet->header().markerBit,
+ packet->header().ssrc);
+ if (print_audio_level && packet->header().extension.hasAudioLevel) {
+ // |audioLevel| consists of one bit for "V" and then 7 bits level.
+ fprintf(out_file,
+ " %5u (%1i)",
+ packet->header().extension.audioLevel & 0x7F,
+ (packet->header().extension.audioLevel & 0x80) == 0 ? 0 : 1);
+ }
+ fprintf(out_file, "\n");
+
+ if (packet->header().payloadType == FLAGS_red) {
+ std::list<webrtc::RTPHeader*> red_headers;
+ packet->ExtractRedHeaders(&red_headers);
+ while (!red_headers.empty()) {
+ webrtc::RTPHeader* red = red_headers.front();
+ assert(red);
+ fprintf(out_file,
+ "* %5u %10u %10u %5i\n",
+ red->sequenceNumber,
+ red->timestamp,
+ static_cast<unsigned int>(packet->time_ms()),
+ red->payloadType);
+ red_headers.pop_front();
+ delete red;
+ }
+ }
+ }
+
+ fclose(in_file);
+ fclose(out_file);
+
+ return 0;
+}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc
new file mode 100644
index 00000000000..6490d46857c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.cc
@@ -0,0 +1,147 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h"
+
+#include <assert.h>
+#include <string.h>
+#ifdef WIN32
+#include <winsock2.h>
+#else
+#include <netinet/in.h>
+#endif
+
+#include "webrtc/modules/audio_coding/neteq/tools/packet.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+
+namespace webrtc {
+namespace test {
+
+RtpFileSource* RtpFileSource::Create(const std::string& file_name) {
+ RtpFileSource* source = new RtpFileSource;
+ assert(source);
+ if (!source->OpenFile(file_name) || !source->SkipFileHeader()) {
+ assert(false);
+ delete source;
+ return NULL;
+ }
+ return source;
+}
+
+RtpFileSource::~RtpFileSource() {
+ if (in_file_)
+ fclose(in_file_);
+}
+
+bool RtpFileSource::RegisterRtpHeaderExtension(RTPExtensionType type,
+ uint8_t id) {
+ assert(parser_.get());
+ return parser_->RegisterRtpHeaderExtension(type, id);
+}
+
+Packet* RtpFileSource::NextPacket() {
+ while (!EndOfFile()) {
+ uint16_t length;
+ if (fread(&length, sizeof(length), 1, in_file_) == 0) {
+ assert(false);
+ return NULL;
+ }
+ length = ntohs(length);
+
+ uint16_t plen;
+ if (fread(&plen, sizeof(plen), 1, in_file_) == 0) {
+ assert(false);
+ return NULL;
+ }
+ plen = ntohs(plen);
+
+ uint32_t offset;
+ if (fread(&offset, sizeof(offset), 1, in_file_) == 0) {
+ assert(false);
+ return NULL;
+ }
+ offset = ntohl(offset);
+
+ // Use length here because a plen of 0 specifies RTCP.
+ assert(length >= kPacketHeaderSize);
+ size_t packet_size_bytes = length - kPacketHeaderSize;
+ if (packet_size_bytes == 0) {
+ // May be an RTCP packet.
+ // Read the next one.
+ continue;
+ }
+ scoped_ptr<uint8_t> packet_memory(new uint8_t[packet_size_bytes]);
+ if (fread(packet_memory.get(), 1, packet_size_bytes, in_file_) !=
+ packet_size_bytes) {
+ assert(false);
+ return NULL;
+ }
+ scoped_ptr<Packet> packet(new Packet(packet_memory.release(),
+ packet_size_bytes,
+ plen,
+ offset,
+ *parser_.get()));
+ if (!packet->valid_header()) {
+ assert(false);
+ return NULL;
+ }
+ return packet.release();
+ }
+ return NULL;
+}
+
+bool RtpFileSource::EndOfFile() const {
+ assert(in_file_);
+ return ftell(in_file_) >= file_end_;
+}
+
+RtpFileSource::RtpFileSource()
+ : PacketSource(),
+ in_file_(NULL),
+ file_end_(-1),
+ parser_(RtpHeaderParser::Create()) {}
+
+bool RtpFileSource::OpenFile(const std::string& file_name) {
+ in_file_ = fopen(file_name.c_str(), "rb");
+ assert(in_file_);
+ if (in_file_ == NULL) {
+ return false;
+ }
+
+ // Find out how long the file is.
+ fseek(in_file_, 0, SEEK_END);
+ file_end_ = ftell(in_file_);
+ rewind(in_file_);
+ return true;
+}
+
+bool RtpFileSource::SkipFileHeader() {
+ char firstline[kFirstLineLength];
+ assert(in_file_);
+ if (fgets(firstline, kFirstLineLength, in_file_) == NULL) {
+ assert(false);
+ return false;
+ }
+ // Check that the first line is ok.
+ if ((strncmp(firstline, "#!rtpplay1.0", 12) != 0) &&
+ (strncmp(firstline, "#!RTPencode1.0", 14) != 0)) {
+ assert(false);
+ return false;
+ }
+ // Skip the file header.
+ if (fseek(in_file_, kRtpFileHeaderSize, SEEK_CUR) != 0) {
+ assert(false);
+ return false;
+ }
+ return true;
+}
+
+} // namespace test
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h
new file mode 100644
index 00000000000..6b92a88698f
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_file_source.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_FILE_SOURCE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_FILE_SOURCE_H_
+
+#include <stdio.h>
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/audio_coding/neteq/tools/packet_source.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class RtpHeaderParser;
+
+namespace test {
+
+class RtpFileSource : public PacketSource {
+ public:
+ // Creates an RtpFileSource reading from |file_name|. If the file cannot be
+ // opened, or has the wrong format, NULL will be returned.
+ static RtpFileSource* Create(const std::string& file_name);
+
+ virtual ~RtpFileSource();
+
+ // Registers an RTP header extension and binds it to |id|.
+ virtual bool RegisterRtpHeaderExtension(RTPExtensionType type, uint8_t id);
+
+ // Returns a pointer to the next packet. Returns NULL if end of file was
+ // reached, or if a the data was corrupt.
+ virtual Packet* NextPacket();
+
+ // Returns true if the end of file has been reached.
+ virtual bool EndOfFile() const;
+
+ private:
+ static const int kFirstLineLength = 40;
+ static const int kRtpFileHeaderSize = 4 + 4 + 4 + 2 + 2;
+ static const size_t kPacketHeaderSize = 8;
+
+ RtpFileSource();
+
+ bool OpenFile(const std::string& file_name);
+
+ bool SkipFileHeader();
+
+ FILE* in_file_;
+ int64_t file_end_;
+ scoped_ptr<RtpHeaderParser> parser_;
+
+ DISALLOW_COPY_AND_ASSIGN(RtpFileSource);
+};
+
+} // namespace test
+} // namespace webrtc
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_FILE_SOURCE_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.cc
index 8d9a89d5450..17ac209f1d9 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.cc
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.cc
@@ -10,7 +10,7 @@
#include <assert.h>
-#include "webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h"
+#include "webrtc/modules/audio_coding/neteq/tools/rtp_generator.h"
namespace webrtc {
namespace test {
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.h
index ece7ef29808..d3824c8d22d 100644
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/tools/rtp_generator.h
+++ b/chromium/third_party/webrtc/modules/audio_coding/neteq/tools/rtp_generator.h
@@ -8,11 +8,11 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_RTP_GENERATOR_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_RTP_GENERATOR_H_
+#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_GENERATOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_GENERATOR_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -54,4 +54,4 @@ class RtpGenerator {
} // namespace test
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_TOOLS_RTP_GENERATOR_H_
+#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ_TOOLS_RTP_GENERATOR_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/unmute_signal.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/unmute_signal.c
deleted file mode 100644
index 3128f21f4a0..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/unmute_signal.c
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This function "unmutes" a vector on a sample by sample basis.
- */
-
-#include "dsp_helpfunctions.h"
-
-#include "signal_processing_library.h"
-
-
-void WebRtcNetEQ_UnmuteSignal(int16_t *pw16_inVec, int16_t *startMuteFact,
- int16_t *pw16_outVec, int16_t unmuteFact,
- int16_t N)
-{
- int i;
- uint16_t w16_tmp;
- int32_t w32_tmp;
-
- w16_tmp = (uint16_t) *startMuteFact;
- w32_tmp = WEBRTC_SPL_LSHIFT_W32((int32_t)w16_tmp,6) + 32;
- for (i = 0; i < N; i++)
- {
- pw16_outVec[i]
- = (int16_t) ((WEBRTC_SPL_MUL_16_16(w16_tmp, pw16_inVec[i]) + 8192) >> 14);
- w32_tmp += unmuteFact;
- w32_tmp = WEBRTC_SPL_MAX(0, w32_tmp);
- w16_tmp = (uint16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 6); /* 20 - 14 = 6 */
- w16_tmp = WEBRTC_SPL_MIN(16384, w16_tmp);
- }
- *startMuteFact = (int16_t) w16_tmp;
-}
-
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq.c b/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq.c
deleted file mode 100644
index fad690d0810..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq.c
+++ /dev/null
@@ -1,1769 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * Implementation of main NetEQ API.
- */
-
-#include "webrtc_neteq.h"
-#include "webrtc_neteq_internal.h"
-
-#include <assert.h>
-#include <string.h>
-
-#include "typedefs.h"
-#include "signal_processing_library.h"
-
-#include "neteq_error_codes.h"
-#include "mcu_dsp_common.h"
-#include "rtcp.h"
-
-#define RETURN_ON_ERROR( macroExpr, macroInstPtr ) { \
- if ((macroExpr) != 0) { \
- if ((macroExpr) == -1) { \
- (macroInstPtr)->ErrorCode = - (NETEQ_OTHER_ERROR); \
- } else { \
- (macroInstPtr)->ErrorCode = -((int16_t) (macroExpr)); \
- } \
- return(-1); \
- } }
-
-int WebRtcNetEQ_strncpy(char *strDest, int numberOfElements,
- const char *strSource, int count)
-{
- /* check vector lengths */
- if (count > numberOfElements)
- {
- strDest[0] = '\0';
- return (-1);
- }
- else
- {
- strncpy(strDest, strSource, count);
- return (0);
- }
-}
-
-/**********************************************************
- * NETEQ Functions
- */
-
-/*****************************************
- * Error functions
- */
-
-int WebRtcNetEQ_GetErrorCode(void *inst)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- return (NetEqMainInst->ErrorCode);
-}
-
-int WebRtcNetEQ_GetErrorName(int errorCode, char *errorName, int maxStrLen)
-{
- if ((errorName == NULL) || (maxStrLen <= 0))
- {
- return (-1);
- }
-
- if (errorCode < 0)
- {
- errorCode = -errorCode; // absolute value
- }
-
- switch (errorCode)
- {
- case 1: // could be -1
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "OTHER_ERROR", maxStrLen);
- break;
- }
- case 1001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_INSTRUCTION", maxStrLen);
- break;
- }
- case 1002:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_NETWORK_TYPE", maxStrLen);
- break;
- }
- case 1003:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_DELAYVALUE", maxStrLen);
- break;
- }
- case 1004:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_PLAYOUTMODE", maxStrLen);
- break;
- }
- case 1005:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CORRUPT_INSTANCE", maxStrLen);
- break;
- }
- case 1006:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "ILLEGAL_MASTER_SLAVE_SWITCH", maxStrLen);
- break;
- }
- case 1007:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "MASTER_SLAVE_ERROR", maxStrLen);
- break;
- }
- case 2001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "UNKNOWN_BUFSTAT_DECISION", maxStrLen);
- break;
- }
- case 2002:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECOUT_ERROR_DECODING", maxStrLen);
- break;
- }
- case 2003:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECOUT_ERROR_SAMPLEUNDERRUN", maxStrLen);
- break;
- }
- case 2004:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECOUT_ERROR_DECODED_TOO_MUCH",
- maxStrLen);
- break;
- }
- case 3001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECIN_CNG_ERROR", maxStrLen);
- break;
- }
- case 3002:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECIN_UNKNOWNPAYLOAD", maxStrLen);
- break;
- }
- case 3003:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECIN_BUFFERINSERT_ERROR", maxStrLen);
- break;
- }
- case 4001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_INIT_ERROR", maxStrLen);
- break;
- }
- case 4002:
- case 4003:
- case 4004:
- case 4005:
- case 4006:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_INSERT_ERROR1", maxStrLen);
- break;
- }
- case 4007:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "UNKNOWN_G723_HEADER", maxStrLen);
- break;
- }
- case 4008:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_NONEXISTING_PACKET", maxStrLen);
- break;
- }
- case 4009:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_NOT_INITIALIZED", maxStrLen);
- break;
- }
- case 4010:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "AMBIGUOUS_ILBC_FRAME_SIZE", maxStrLen);
- break;
- }
- case 5001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_FULL", maxStrLen);
- break;
- }
- case 5002:
- case 5003:
- case 5004:
- case 5005:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_NOT_EXIST", maxStrLen);
- break;
- }
- case 5006:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_UNKNOWN_CODEC", maxStrLen);
- break;
- }
- case 5007:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_PAYLOAD_TAKEN", maxStrLen);
- break;
- }
- case 5008:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_UNSUPPORTED_CODEC", maxStrLen);
- break;
- }
- case 5009:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_UNSUPPORTED_FS", maxStrLen);
- break;
- }
- case 6001:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_DEC_PARAMETER_ERROR", maxStrLen);
- break;
- }
- case 6002:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_INSERT_ERROR", maxStrLen);
- break;
- }
- case 6003:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_GEN_UNKNOWN_SAMP_FREQ", maxStrLen);
- break;
- }
- case 6004:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_NOT_SUPPORTED", maxStrLen);
- break;
- }
- case 7001:
- case 7002:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RED_SPLIT_ERROR", maxStrLen);
- break;
- }
- case 7003:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RTP_TOO_SHORT_PACKET", maxStrLen);
- break;
- }
- case 7004:
- {
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "RTP_CORRUPT_PACKET", maxStrLen);
- break;
- }
- default:
- {
- /* check for decoder error ranges */
- if (errorCode >= 6010 && errorCode <= 6810)
- {
- /* iSAC error code */
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "iSAC ERROR", maxStrLen);
- break;
- }
-
- WebRtcNetEQ_strncpy(errorName, maxStrLen, "UNKNOWN_ERROR", maxStrLen);
- return (-1);
- }
- }
-
- return (0);
-}
-
-/* Assign functions (create not allowed in order to avoid malloc in lib) */
-int WebRtcNetEQ_AssignSize(int *sizeinbytes)
-{
- *sizeinbytes = (sizeof(MainInst_t) * 2) / sizeof(int16_t);
- return (0);
-}
-
-int WebRtcNetEQ_Assign(void **inst, void *NETEQ_inst_Addr)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) NETEQ_inst_Addr;
- *inst = NETEQ_inst_Addr;
- if (*inst == NULL) return (-1);
-
- WebRtcSpl_Init();
-
- /* Clear memory */
- WebRtcSpl_MemSetW16((int16_t*) NetEqMainInst, 0,
- (sizeof(MainInst_t) / sizeof(int16_t)));
- ok = WebRtcNetEQ_McuReset(&NetEqMainInst->MCUinst);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (0);
-}
-
-int WebRtcNetEQ_GetRecommendedBufferSize(void *inst, const enum WebRtcNetEQDecoder *codec,
- int noOfCodecs, enum WebRtcNetEQNetworkType nwType,
- int *MaxNoOfPackets, int *sizeinbytes,
- int* per_packet_overhead_bytes)
-{
- int ok = 0;
- int multiplier;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- *MaxNoOfPackets = 0;
- *sizeinbytes = 0;
- ok = WebRtcNetEQ_GetDefaultCodecSettings(codec, noOfCodecs, sizeinbytes,
- MaxNoOfPackets,
- per_packet_overhead_bytes);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- if (nwType == kUDPNormal)
- {
- multiplier = 1;
- }
- else if (nwType == kUDPVideoSync)
- {
- multiplier = 4;
- }
- else if (nwType == kTCPNormal)
- {
- multiplier = 4;
- }
- else if (nwType == kTCPLargeJitter)
- {
- multiplier = 8;
- }
- else if (nwType == kTCPXLargeJitter)
- {
- multiplier = 12;
- }
- else
- {
- NetEqMainInst->ErrorCode = -FAULTY_NETWORK_TYPE;
- return (-1);
- }
- *MaxNoOfPackets = (*MaxNoOfPackets) * multiplier;
- *sizeinbytes = (*sizeinbytes) * multiplier;
- return 0;
-}
-
-int WebRtcNetEQ_AssignBuffer(void *inst, int MaxNoOfPackets, void *NETEQ_Buffer_Addr,
- int sizeinbytes)
-{
- int ok;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- ok = WebRtcNetEQ_PacketBufferInit(&NetEqMainInst->MCUinst.PacketBuffer_inst,
- MaxNoOfPackets, (int16_t*) NETEQ_Buffer_Addr, (sizeinbytes >> 1));
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-/************************************************
- * Init functions
- */
-
-/****************************************************************************
- * WebRtcNetEQ_Init(...)
- *
- * Initialize NetEQ.
- *
- * Input:
- * - inst : NetEQ instance
- * - fs : Initial sample rate in Hz (may change with payload)
- *
- * Output:
- * - inst : Initialized NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_Init(void *inst, uint16_t fs)
-{
- int ok = 0;
-
- /* Typecast inst to internal instance format */
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
-#ifdef NETEQ_VAD
- /* Start out with no PostDecode VAD instance */
- NetEqMainInst->DSPinst.VADInst.VADState = NULL;
- /* Also set all VAD function pointers to NULL */
- NetEqMainInst->DSPinst.VADInst.initFunction = NULL;
- NetEqMainInst->DSPinst.VADInst.setmodeFunction = NULL;
- NetEqMainInst->DSPinst.VADInst.VADFunction = NULL;
-#endif /* NETEQ_VAD */
-
- ok = WebRtcNetEQ_DSPinit(NetEqMainInst); /* Init addresses between MCU and DSP */
- RETURN_ON_ERROR(ok, NetEqMainInst);
-
- ok = WebRtcNetEQ_DSPInit(&NetEqMainInst->DSPinst, fs); /* Init dsp side */
- RETURN_ON_ERROR(ok, NetEqMainInst);
- /* set BGN mode to default, since it is not cleared by DSP init function */
- NetEqMainInst->DSPinst.BGNInst.bgnMode = BGN_ON;
-
- /* init statistics functions and counters */
- ok = WebRtcNetEQ_ClearInCallStats(&NetEqMainInst->DSPinst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
- ok = WebRtcNetEQ_ClearPostCallStats(&NetEqMainInst->DSPinst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
- ok = WebRtcNetEQ_ResetMcuJitterStat(&NetEqMainInst->MCUinst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
-
- /* flush packet buffer */
- ok = WebRtcNetEQ_PacketBufferFlush(&NetEqMainInst->MCUinst.PacketBuffer_inst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
-
- /* set some variables to initial values */
- NetEqMainInst->MCUinst.current_Codec = -1;
- NetEqMainInst->MCUinst.current_Payload = -1;
- NetEqMainInst->MCUinst.first_packet = 1;
- NetEqMainInst->MCUinst.one_desc = 0;
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs = 0;
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.minimum_delay_ms = 0;
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.maximum_delay_ms =
- 10000;
- NetEqMainInst->MCUinst.NoOfExpandCalls = 0;
- NetEqMainInst->MCUinst.fs = fs;
-
- /* Not in AV-sync by default. */
- NetEqMainInst->MCUinst.av_sync = 0;
-
-#ifdef NETEQ_ATEVENT_DECODE
- /* init DTMF decoder */
- ok = WebRtcNetEQ_DtmfDecoderInit(&(NetEqMainInst->MCUinst.DTMF_inst),fs,560);
- RETURN_ON_ERROR(ok, NetEqMainInst);
-#endif
-
- /* init RTCP statistics */
- WebRtcNetEQ_RTCPInit(&(NetEqMainInst->MCUinst.RTCP_inst), 0);
-
- /* set BufferStat struct to zero */
- WebRtcSpl_MemSetW16((int16_t*) &(NetEqMainInst->MCUinst.BufferStat_inst), 0,
- sizeof(BufstatsInst_t) / sizeof(int16_t));
-
- /* reset automode */
- WebRtcNetEQ_ResetAutomode(&(NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst),
- NetEqMainInst->MCUinst.PacketBuffer_inst.maxInsertPositions);
-
- NetEqMainInst->ErrorCode = 0;
-
-#ifdef NETEQ_STEREO
- /* set master/slave info to undecided */
- NetEqMainInst->masterSlave = 0;
-#endif
-
- /* Set to an invalid value. */
- NetEqMainInst->MCUinst.decoded_packet_sequence_number = -1;
- NetEqMainInst->MCUinst.decoded_packet_timestamp = 0;
-
- return (ok);
-}
-
-int WebRtcNetEQ_FlushBuffers(void *inst)
-{
- int ok = 0;
-
- /* Typecast inst to internal instance format */
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
- /* Flush packet buffer */
- ok = WebRtcNetEQ_PacketBufferFlush(&NetEqMainInst->MCUinst.PacketBuffer_inst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
-
- /* Set MCU to wait for new codec */
- NetEqMainInst->MCUinst.first_packet = 1;
-
- /* Flush speech buffer */
- ok = WebRtcNetEQ_FlushSpeechBuffer(&NetEqMainInst->DSPinst);
- RETURN_ON_ERROR(ok, NetEqMainInst);
-
- return 0;
-}
-
-int WebRtcNetEQ_SetAVTPlayout(void *inst, int PlayoutAVTon)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
-#ifdef NETEQ_ATEVENT_DECODE
- NetEqMainInst->MCUinst.AVT_PlayoutOn = PlayoutAVTon;
- return(0);
-#else
- if (PlayoutAVTon != 0)
- {
- NetEqMainInst->ErrorCode = -DTMF_NOT_SUPPORTED;
- return (-1);
- }
- else
- {
- return (0);
- }
-#endif
-}
-
-int WebRtcNetEQ_SetExtraDelay(void *inst, int DelayInMs) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- if ((DelayInMs < 0) || (DelayInMs > 10000)) {
- NetEqMainInst->ErrorCode = -FAULTY_DELAYVALUE;
- return (-1);
- }
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs = DelayInMs;
- return (0);
-}
-
-int WebRtcNetEQ_SetMinimumDelay(void *inst, int minimum_delay_ms) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return -1;
- if (minimum_delay_ms < 0 || minimum_delay_ms > 10000) {
- NetEqMainInst->ErrorCode = -FAULTY_DELAYVALUE;
- return -1;
- }
- if ((NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.maximum_delay_ms >
- 0) && (minimum_delay_ms >
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.maximum_delay_ms)) {
- NetEqMainInst->ErrorCode = -FAULTY_DELAYVALUE;
- return -1;
- }
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.minimum_delay_ms =
- minimum_delay_ms;
- return 0;
-}
-
-int WebRtcNetEQ_SetMaximumDelay(void *inst, int maximum_delay_ms) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return -1;
- if (maximum_delay_ms < 0 || maximum_delay_ms > 10000) {
- NetEqMainInst->ErrorCode = -FAULTY_DELAYVALUE;
- return -1;
- }
- if (maximum_delay_ms <
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.minimum_delay_ms) {
- NetEqMainInst->ErrorCode = -FAULTY_DELAYVALUE;
- return -1;
- }
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.maximum_delay_ms =
- maximum_delay_ms;
- return 0;
-}
-
-int WebRtcNetEQ_SetPlayoutMode(void *inst, enum WebRtcNetEQPlayoutMode playoutMode)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- if ((playoutMode != kPlayoutOn) && (playoutMode != kPlayoutOff) && (playoutMode
- != kPlayoutFax) && (playoutMode != kPlayoutStreaming))
- {
- NetEqMainInst->ErrorCode = -FAULTY_PLAYOUTMODE;
- return (-1);
- }
- else
- {
- NetEqMainInst->MCUinst.NetEqPlayoutMode = playoutMode;
- return (0);
- }
-}
-
-int WebRtcNetEQ_SetBGNMode(void *inst, enum WebRtcNetEQBGNMode bgnMode)
-{
-
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- /* Instance sanity */
- if (NetEqMainInst == NULL) return (-1);
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->MCUinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
- NetEqMainInst->DSPinst.BGNInst.bgnMode = (enum BGNMode) bgnMode;
-
- return (0);
-}
-
-int WebRtcNetEQ_GetBGNMode(const void *inst, enum WebRtcNetEQBGNMode *bgnMode)
-{
-
- const MainInst_t *NetEqMainInst = (const MainInst_t*) inst;
-
- /* Instance sanity */
- if (NetEqMainInst == NULL) return (-1);
-
- *bgnMode = (enum WebRtcNetEQBGNMode) NetEqMainInst->DSPinst.BGNInst.bgnMode;
-
- return (0);
-}
-
-/************************************************
- * CodecDB functions
- */
-
-int WebRtcNetEQ_CodecDbReset(void *inst)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- ok = WebRtcNetEQ_DbReset(&NetEqMainInst->MCUinst.codec_DB_inst);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
-
- /* set function pointers to NULL to prevent RecOut from using the codec */
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeRCU = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcAddLatePkt = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeInit = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodePLC = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcGetMDinfo = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcUpdBWEst = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcGetErrorCode = NULL;
-
- return (0);
-}
-
-int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, int16_t *UsedEntries,
- int16_t *MaxEntries)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- *MaxEntries = NUM_CODECS;
- *UsedEntries = NetEqMainInst->MCUinst.codec_DB_inst.nrOfCodecs;
- return (0);
-}
-
-int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, int16_t Entry,
- enum WebRtcNetEQDecoder *codec)
-{
- int i;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- *codec = (enum WebRtcNetEQDecoder) 0;
- if ((Entry >= 0) && (Entry < NetEqMainInst->MCUinst.codec_DB_inst.nrOfCodecs))
- {
- for (i = 0; i < NUM_TOTAL_CODECS; i++)
- {
- if (NetEqMainInst->MCUinst.codec_DB_inst.position[i] == Entry)
- {
- *codec = (enum WebRtcNetEQDecoder) i;
- }
- }
- }
- else
- {
- NetEqMainInst->ErrorCode = -(CODEC_DB_NOT_EXIST1);
- return (-1);
- }
- return (0);
-}
-
-int WebRtcNetEQ_CodecDbAdd(void *inst, WebRtcNetEQ_CodecDef *codecInst)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- ok = WebRtcNetEQ_DbAdd(&NetEqMainInst->MCUinst.codec_DB_inst, codecInst->codec,
- codecInst->payloadType, codecInst->funcDecode, codecInst->funcDecodeRCU,
- codecInst->funcDecodePLC, codecInst->funcDecodeInit, codecInst->funcAddLatePkt,
- codecInst->funcGetMDinfo, codecInst->funcGetPitch, codecInst->funcUpdBWEst,
- codecInst->funcDurationEst, codecInst->funcGetErrorCode,
- codecInst->codec_state, codecInst->codec_fs);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-int WebRtcNetEQ_CodecDbRemove(void *inst, enum WebRtcNetEQDecoder codec)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
-
- /* check if currently used codec is being removed */
- if (NetEqMainInst->MCUinst.current_Codec == (int16_t) codec)
- {
- /* set function pointers to NULL to prevent RecOut from using the codec */
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeRCU = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcAddLatePkt = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeInit = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodePLC = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcGetMDinfo = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcUpdBWEst = NULL;
- NetEqMainInst->DSPinst.codec_ptr_inst.funcGetErrorCode = NULL;
- }
-
- ok = WebRtcNetEQ_DbRemove(&NetEqMainInst->MCUinst.codec_DB_inst, codec);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-/*********************************
- * Real-time functions
- */
-
-int WebRtcNetEQ_RecIn(void *inst, int16_t *p_w16datagramstart, int16_t w16_RTPlen,
- uint32_t uw32_timeRec)
-{
- int ok = 0;
- RTPPacket_t RTPpacket;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->MCUinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
- /* Parse RTP header */
- ok = WebRtcNetEQ_RTPPayloadInfo(p_w16datagramstart, w16_RTPlen, &RTPpacket);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
-
- ok = WebRtcNetEQ_RecInInternal(&NetEqMainInst->MCUinst, &RTPpacket, uw32_timeRec);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_RecInRTPStruct(...)
- *
- * Alternative RecIn function, used when the RTP data has already been
- * parsed into an RTP info struct (WebRtcNetEQ_RTPInfo).
- *
- * Input:
- * - inst : NetEQ instance
- * - rtpInfo : Pointer to RTP info
- * - payloadPtr : Pointer to the RTP payload (first byte after header)
- * - payloadLenBytes : Length (in bytes) of the payload in payloadPtr
- * - timeRec : Receive time (in timestamps of the used codec)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-int WebRtcNetEQ_RecInRTPStruct(void *inst, WebRtcNetEQ_RTPInfo *rtpInfo,
- const uint8_t *payloadPtr, int16_t payloadLenBytes,
- uint32_t uw32_timeRec)
-{
- int ok = 0;
- RTPPacket_t RTPpacket;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->MCUinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
- /* Load NetEQ's RTP struct from Module RTP struct */
- RTPpacket.payloadType = rtpInfo->payloadType;
- RTPpacket.seqNumber = rtpInfo->sequenceNumber;
- RTPpacket.timeStamp = rtpInfo->timeStamp;
- RTPpacket.ssrc = rtpInfo->SSRC;
- RTPpacket.payload = (const int16_t*) payloadPtr;
- RTPpacket.payloadLen = payloadLenBytes;
- RTPpacket.starts_byte1 = 0;
-
- ok = WebRtcNetEQ_RecInInternal(&NetEqMainInst->MCUinst, &RTPpacket, uw32_timeRec);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-int WebRtcNetEQ_RecOut(void *inst, int16_t *pw16_outData, int16_t *pw16_len)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-#ifdef NETEQ_STEREO
- MasterSlaveInfo msInfo;
- msInfo.msMode = NETEQ_MONO;
-#endif
-
- if (NetEqMainInst == NULL) return (-1);
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
-#ifdef NETEQ_STEREO
- NetEqMainInst->DSPinst.msInfo = &msInfo;
-#endif
-
- ok = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
- pw16_len, 0 /* not BGN only */, NetEqMainInst->MCUinst.av_sync);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_RecOutMasterSlave(...)
- *
- * RecOut function for running several NetEQ instances in master/slave mode.
- * One master can be used to control several slaves.
- *
- * Input:
- * - inst : NetEQ instance
- * - isMaster : Non-zero indicates that this is the master channel
- * - msInfo : (slave only) Information from master
- *
- * Output:
- * - inst : Updated NetEQ instance
- * - pw16_outData : Pointer to vector where output should be written
- * - pw16_len : Pointer to variable where output length is returned
- * - msInfo : (master only) Information to slave(s)
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_RecOutMasterSlave(void *inst, int16_t *pw16_outData,
- int16_t *pw16_len, void *msInfo,
- int16_t isMaster)
-{
-#ifndef NETEQ_STEREO
- /* Stereo not supported */
- return(-1);
-#else
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- if (NetEqMainInst == NULL) return (-1);
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
- if (msInfo == NULL)
- {
- /* msInfo not provided */
- NetEqMainInst->ErrorCode = NETEQ_OTHER_ERROR;
- return (-1);
- }
-
- /* translate from external to internal Master/Slave information */
- NetEqMainInst->DSPinst.msInfo = (MasterSlaveInfo *) msInfo;
-
- /* check that we have not done a master/slave switch without first re-initializing */
- if ((NetEqMainInst->masterSlave == 1 && !isMaster) || /* switch from master to slave */
- (NetEqMainInst->masterSlave == 2 && isMaster)) /* switch from slave to master */
- {
- NetEqMainInst->ErrorCode = ILLEGAL_MASTER_SLAVE_SWITCH;
- return (-1);
- }
-
- if (!isMaster)
- {
- /* this is the slave */
- NetEqMainInst->masterSlave = 2;
- NetEqMainInst->DSPinst.msInfo->msMode = NETEQ_SLAVE;
- }
- else
- {
- NetEqMainInst->DSPinst.msInfo->msMode = NETEQ_MASTER;
- }
-
- ok = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
- pw16_len, 0 /* not BGN only */, NetEqMainInst->MCUinst.av_sync);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
-
- if (isMaster)
- {
- /* this is the master */
- NetEqMainInst->masterSlave = 1;
- }
-
- return (ok);
-#endif
-}
-
-int WebRtcNetEQ_GetMasterSlaveInfoSize()
-{
-#ifdef NETEQ_STEREO
- return (sizeof(MasterSlaveInfo));
-#else
- return(-1);
-#endif
-}
-
-/* Special RecOut that does not do any decoding. */
-int WebRtcNetEQ_RecOutNoDecode(void *inst, int16_t *pw16_outData,
- int16_t *pw16_len)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-#ifdef NETEQ_STEREO
- MasterSlaveInfo msInfo;
-#endif
-
- if (NetEqMainInst == NULL) return (-1);
-
- /* Check for corrupt/cleared instance */
- if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
- {
- /* Instance is corrupt */
- NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
- return (-1);
- }
-
-#ifdef NETEQ_STEREO
- /* keep same mode as before */
- switch (NetEqMainInst->masterSlave)
- {
- case 1:
- {
- msInfo.msMode = NETEQ_MASTER;
- break;
- }
- case 2:
- {
- msInfo.msMode = NETEQ_SLAVE;
- break;
- }
- default:
- {
- msInfo.msMode = NETEQ_MONO;
- break;
- }
- }
-
- NetEqMainInst->DSPinst.msInfo = &msInfo;
-#endif
-
- ok = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
- pw16_len, 1 /* BGN only */, NetEqMainInst->MCUinst.av_sync);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-int WebRtcNetEQ_GetRTCPStats(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- ok = WebRtcNetEQ_RTCPGetStats(&NetEqMainInst->MCUinst.RTCP_inst,
- &RTCP_inst->fraction_lost, &RTCP_inst->cum_lost, &RTCP_inst->ext_max,
- &RTCP_inst->jitter, 0);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-int WebRtcNetEQ_GetRTCPStatsNoReset(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst)
-{
- int ok = 0;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- ok = WebRtcNetEQ_RTCPGetStats(&NetEqMainInst->MCUinst.RTCP_inst,
- &RTCP_inst->fraction_lost, &RTCP_inst->cum_lost, &RTCP_inst->ext_max,
- &RTCP_inst->jitter, 1);
- if (ok != 0)
- {
- NetEqMainInst->ErrorCode = -ok;
- return (-1);
- }
- return (ok);
-}
-
-int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, uint32_t *timestamp)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
-
- if (NetEqMainInst->MCUinst.TSscalingInitialized)
- {
- *timestamp = WebRtcNetEQ_ScaleTimestampInternalToExternal(&NetEqMainInst->MCUinst,
- NetEqMainInst->DSPinst.videoSyncTimestamp);
- }
- else
- {
- *timestamp = NetEqMainInst->DSPinst.videoSyncTimestamp;
- }
-
- return (0);
-}
-
-/****************************************************************************
- * WebRtcNetEQ_GetSpeechOutputType(...)
- *
- * Get the output type for the audio provided by the latest call to
- * WebRtcNetEQ_RecOut().
- *
- * kOutputNormal = normal audio (possibly processed)
- * kOutputPLC = loss concealment through stretching audio
- * kOutputCNG = comfort noise (codec-internal or RFC3389)
- * kOutputPLCtoCNG = background noise only due to long expand or error
- * kOutputVADPassive = PostDecode VAD signalling passive speaker
- *
- * Input:
- * - inst : NetEQ instance
- *
- * Output:
- * - outputType : Output type from enum list WebRtcNetEQOutputType
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outputType)
-{
- /* Typecast to internal instance type */
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
- if ((NetEqMainInst->DSPinst.w16_mode & MODE_BGN_ONLY) != 0)
- {
- /* If last mode was background noise only */
- *outputType = kOutputPLCtoCNG;
-
- }
- else if ((NetEqMainInst->DSPinst.w16_mode == MODE_CODEC_INTERNAL_CNG)
- || (NetEqMainInst->DSPinst.w16_mode == MODE_RFC3389CNG))
- {
- /* If CN or internal CNG */
- *outputType = kOutputCNG;
-
- }
- else if ((NetEqMainInst->DSPinst.w16_mode == MODE_EXPAND)
- && (NetEqMainInst->DSPinst.ExpandInst.w16_expandMuteFactor == 0))
- {
- /* Expand mode has faded down to background noise only (very long expand) */
- *outputType = kOutputPLCtoCNG;
-
- }
- else if (NetEqMainInst->DSPinst.w16_mode == MODE_EXPAND)
- {
- /* PLC mode */
- *outputType = kOutputPLC;
-
-#ifdef NETEQ_VAD
- }
- else if ( NetEqMainInst->DSPinst.VADInst.VADDecision == 0 )
- {
- /* post-decode VAD says passive speaker */
- *outputType = kOutputVADPassive;
-#endif /* NETEQ_VAD */
-
- }
- else
- {
- /* Normal speech output type (can still be manipulated, e.g., accelerated) */
- *outputType = kOutputNormal;
- }
-
- return (0);
-}
-
-/**********************************
- * Functions related to VQmon
- */
-
-#define WEBRTC_NETEQ_CONCEALMENTFLAG_LOST 0x01
-#define WEBRTC_NETEQ_CONCEALMENTFLAG_DISCARDED 0x02
-#define WEBRTC_NETEQ_CONCEALMENTFLAG_SUPRESS 0x04
-#define WEBRTC_NETEQ_CONCEALMENTFLAG_CNGACTIVE 0x80
-
-int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, uint16_t *validVoiceDurationMs,
- uint16_t *concealedVoiceDurationMs,
- uint8_t *concealedVoiceFlags)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- int16_t fs_mult;
- int16_t ms_lost;
- if (NetEqMainInst == NULL) return (-1);
- fs_mult = WebRtcSpl_DivW32W16ResW16(NetEqMainInst->MCUinst.fs, 8000);
-
- ms_lost = WebRtcSpl_DivW32W16ResW16(
- (int32_t) NetEqMainInst->DSPinst.w16_concealedTS, (int16_t) (8 * fs_mult));
- if (ms_lost > NetEqMainInst->DSPinst.millisecondsPerCall) ms_lost
- = NetEqMainInst->DSPinst.millisecondsPerCall;
-
- *validVoiceDurationMs = NetEqMainInst->DSPinst.millisecondsPerCall - ms_lost;
- *concealedVoiceDurationMs = ms_lost;
- if (ms_lost > 0)
- {
- *concealedVoiceFlags = WEBRTC_NETEQ_CONCEALMENTFLAG_LOST;
- }
- else
- {
- *concealedVoiceFlags = 0;
- }
- NetEqMainInst->DSPinst.w16_concealedTS -= ms_lost * (8 * fs_mult);
-
- return (0);
-}
-
-int WebRtcNetEQ_VQmonGetConfiguration(void *inst, uint16_t *absMaxDelayMs,
- uint8_t *adaptationRate)
-{
- /* Dummy check the inst, just to avoid compiler warnings. */
- if (inst == NULL)
- {
- /* Do nothing. */
- }
-
- /* Hardcoded variables that are used for VQmon as jitter buffer parameters */
- *absMaxDelayMs = 240;
- *adaptationRate = 1;
- return (0);
-}
-
-int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, uint16_t *avgDelayMs,
- uint16_t *maxDelayMs)
-{
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL) return (-1);
- *avgDelayMs = (uint16_t) (NetEqMainInst->MCUinst.BufferStat_inst.avgDelayMsQ8 >> 8);
- *maxDelayMs = (uint16_t) NetEqMainInst->MCUinst.BufferStat_inst.maxDelayMs;
- return (0);
-}
-
-/*************************************
- * Statistics functions
- */
-
-/* Get the "in-call" statistics from NetEQ.
- * The statistics are reset after the query. */
-int WebRtcNetEQ_GetNetworkStatistics(void *inst, WebRtcNetEQ_NetworkStatistics *stats)
-
-{
-
- uint16_t tempU16;
- uint32_t tempU32, tempU32_2;
- int numShift;
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- /* Instance sanity */
- if (NetEqMainInst == NULL) return (-1);
-
- stats->addedSamples = NetEqMainInst->DSPinst.statInst.addedSamples;
-
- /*******************/
- /* Get buffer size */
- /*******************/
-
- if (NetEqMainInst->MCUinst.fs != 0)
- {
- int32_t temp32;
- /* Query packet buffer for number of samples. */
- temp32 = WebRtcNetEQ_PacketBufferGetSize(
- &NetEqMainInst->MCUinst.PacketBuffer_inst,
- &NetEqMainInst->MCUinst.codec_DB_inst,
- NetEqMainInst->MCUinst.av_sync);
-
- /* Divide by sample rate.
- * Calculate temp32 * 1000 / fs to get result in ms. */
- stats->currentBufferSize = (uint16_t)
- WebRtcSpl_DivU32U16(temp32 * 1000, NetEqMainInst->MCUinst.fs);
-
- /* Add number of samples yet to play in sync buffer. */
- temp32 = (int32_t) (NetEqMainInst->DSPinst.endPosition -
- NetEqMainInst->DSPinst.curPosition);
- stats->currentBufferSize += (uint16_t)
- WebRtcSpl_DivU32U16(temp32 * 1000, NetEqMainInst->MCUinst.fs);
- }
- else
- {
- /* Sample rate not initialized. */
- stats->currentBufferSize = 0;
- }
-
- /***************************/
- /* Get optimal buffer size */
- /***************************/
-
- if (NetEqMainInst->MCUinst.fs != 0)
- {
- /* preferredBufferSize = Bopt * packSizeSamples / (fs/1000) */
- stats->preferredBufferSize
- = (uint16_t) WEBRTC_SPL_MUL_16_16(
- (int16_t) ((NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.optBufLevel) >> 8), /* optimal buffer level in packets shifted to Q0 */
- WebRtcSpl_DivW32W16ResW16(
- (int32_t) NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.packetSpeechLenSamp, /* samples per packet */
- WebRtcSpl_DivW32W16ResW16( (int32_t) NetEqMainInst->MCUinst.fs, (int16_t) 1000 ) /* samples per ms */
- ) );
-
- /* add extra delay */
- if (NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs > 0)
- {
- stats->preferredBufferSize
- += NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs;
- }
- }
- else
- {
- /* sample rate not initialized */
- stats->preferredBufferSize = 0;
- }
-
- /***********************************/
- /* Check if jitter peaks are found */
- /***********************************/
-
- stats->jitterPeaksFound =
- NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.peakFound;
-
- /***********************/
- /* Calculate loss rate */
- /***********************/
-
- /* timestamps elapsed since last report */
- tempU32 = NetEqMainInst->MCUinst.lastReportTS;
-
- if (NetEqMainInst->MCUinst.lostTS == 0)
- {
- /* no losses */
- stats->currentPacketLossRate = 0;
- }
- else if (NetEqMainInst->MCUinst.lostTS < tempU32)
- {
- /* calculate shifts; we want the result in Q14 */
- numShift = WebRtcSpl_NormU32(NetEqMainInst->MCUinst.lostTS); /* numerator shift for normalize */
-
- if (numShift < 14)
- {
- /* cannot shift numerator 14 steps; shift denominator too */
- tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
- }
- else
- {
- /* shift no more than 14 steps */
- numShift = 14;
- }
-
- if (tempU32 == 0)
- {
- /* check for zero denominator; result should be zero in this case */
- stats->currentPacketLossRate = 0;
- }
- else
- {
- /* check that denominator fits in signed 16-bit */
- while (tempU32 > WEBRTC_SPL_WORD16_MAX)
- {
- tempU32 >>= 1; /* right-shift 1 step */
- numShift--; /* compensate in numerator */
- }
- tempU16 = (uint16_t) tempU32;
-
- /* do the shift of numerator */
- tempU32
- = WEBRTC_SPL_SHIFT_W32( (uint32_t) NetEqMainInst->MCUinst.lostTS, numShift);
-
- stats->currentPacketLossRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32,
- tempU16);
- }
- }
- else
- {
- /* lost count is larger than elapsed time count; probably timestamp wrap-around or something else wrong */
- /* set loss rate = 1 */
- stats->currentPacketLossRate = 1 << 14; /* 1 in Q14 */
- }
-
- /**************************/
- /* Calculate discard rate */
- /**************************/
-
- /* timestamps elapsed since last report */
- tempU32 = NetEqMainInst->MCUinst.lastReportTS;
-
- /* number of discarded samples */
- tempU32_2
- = WEBRTC_SPL_MUL_16_U16( (int16_t) NetEqMainInst->MCUinst.PacketBuffer_inst.packSizeSamples,
- NetEqMainInst->MCUinst.PacketBuffer_inst.discardedPackets);
-
- if (tempU32_2 == 0)
- {
- /* no discarded samples */
- stats->currentDiscardRate = 0;
- }
- else if (tempU32_2 < tempU32)
- {
- /* calculate shifts; we want the result in Q14 */
- numShift = WebRtcSpl_NormU32(tempU32_2); /* numerator shift for normalize */
-
- if (numShift < 14)
- {
- /* cannot shift numerator 14 steps; shift denominator too */
- tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
- }
- else
- {
- /* shift no more than 14 steps */
- numShift = 14;
- }
-
- if (tempU32 == 0)
- {
- /* check for zero denominator; result should be zero in this case */
- stats->currentDiscardRate = 0;
- }
- else
- {
- /* check that denominator fits in signed 16-bit */
- while (tempU32 > WEBRTC_SPL_WORD16_MAX)
- {
- tempU32 >>= 1; /* right-shift 1 step */
- numShift--; /* compensate in numerator */
- }
- tempU16 = (uint16_t) tempU32;
-
- /* do the shift of numerator */
- tempU32 = WEBRTC_SPL_SHIFT_W32( tempU32_2, numShift);
-
- stats->currentDiscardRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32, tempU16);
- }
- }
- else
- {
- /* lost count is larger than elapsed time count; probably timestamp wrap-around or something else wrong */
- /* set loss rate = 1 */
- stats->currentDiscardRate = 1 << 14; /* 1 in Q14 */
- }
-
- /*************************************************************/
- /* Calculate Accelerate, Expand and Pre-emptive Expand rates */
- /*************************************************************/
-
- /* timestamps elapsed since last report */
- tempU32 = NetEqMainInst->MCUinst.lastReportTS;
-
- if (NetEqMainInst->DSPinst.statInst.accelerateLength == 0)
- {
- /* no accelerate */
- stats->currentAccelerateRate = 0;
- }
- else if (NetEqMainInst->DSPinst.statInst.accelerateLength < tempU32)
- {
- /* calculate shifts; we want the result in Q14 */
- numShift = WebRtcSpl_NormU32(NetEqMainInst->DSPinst.statInst.accelerateLength); /* numerator shift for normalize */
-
- if (numShift < 14)
- {
- /* cannot shift numerator 14 steps; shift denominator too */
- tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
- }
- else
- {
- /* shift no more than 14 steps */
- numShift = 14;
- }
-
- if (tempU32 == 0)
- {
- /* check for zero denominator; result should be zero in this case */
- stats->currentAccelerateRate = 0;
- }
- else
- {
- /* check that denominator fits in signed 16-bit */
- while (tempU32 > WEBRTC_SPL_WORD16_MAX)
- {
- tempU32 >>= 1; /* right-shift 1 step */
- numShift--; /* compensate in numerator */
- }
- tempU16 = (uint16_t) tempU32;
-
- /* do the shift of numerator */
- tempU32
- = WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.accelerateLength, numShift);
-
- stats->currentAccelerateRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32,
- tempU16);
- }
- }
- else
- {
- /* lost count is larger than elapsed time count; probably timestamp wrap-around or something else wrong */
- /* set loss rate = 1 */
- stats->currentAccelerateRate = 1 << 14; /* 1 in Q14 */
- }
-
- /* timestamps elapsed since last report */
- tempU32 = NetEqMainInst->MCUinst.lastReportTS;
-
- if (NetEqMainInst->DSPinst.statInst.expandLength == 0)
- {
- /* no expand */
- stats->currentExpandRate = 0;
- }
- else if (NetEqMainInst->DSPinst.statInst.expandLength < tempU32)
- {
- /* calculate shifts; we want the result in Q14 */
- numShift = WebRtcSpl_NormU32(NetEqMainInst->DSPinst.statInst.expandLength); /* numerator shift for normalize */
-
- if (numShift < 14)
- {
- /* cannot shift numerator 14 steps; shift denominator too */
- tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
- }
- else
- {
- /* shift no more than 14 steps */
- numShift = 14;
- }
-
- if (tempU32 == 0)
- {
- /* check for zero denominator; result should be zero in this case */
- stats->currentExpandRate = 0;
- }
- else
- {
- /* check that denominator fits in signed 16-bit */
- while (tempU32 > WEBRTC_SPL_WORD16_MAX)
- {
- tempU32 >>= 1; /* right-shift 1 step */
- numShift--; /* compensate in numerator */
- }
- tempU16 = (uint16_t) tempU32;
-
- /* do the shift of numerator */
- tempU32
- = WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.expandLength, numShift);
-
- stats->currentExpandRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32, tempU16);
- }
- }
- else
- {
- /* lost count is larger than elapsed time count; probably timestamp wrap-around or something else wrong */
- /* set loss rate = 1 */
- stats->currentExpandRate = 1 << 14; /* 1 in Q14 */
- }
-
- /* timestamps elapsed since last report */
- tempU32 = NetEqMainInst->MCUinst.lastReportTS;
-
- if (NetEqMainInst->DSPinst.statInst.preemptiveLength == 0)
- {
- /* no pre-emptive expand */
- stats->currentPreemptiveRate = 0;
- }
- else if (NetEqMainInst->DSPinst.statInst.preemptiveLength < tempU32)
- {
- /* calculate shifts; we want the result in Q14 */
- numShift = WebRtcSpl_NormU32(NetEqMainInst->DSPinst.statInst.preemptiveLength); /* numerator shift for normalize */
-
- if (numShift < 14)
- {
- /* cannot shift numerator 14 steps; shift denominator too */
- tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
- }
- else
- {
- /* shift no more than 14 steps */
- numShift = 14;
- }
-
- if (tempU32 == 0)
- {
- /* check for zero denominator; result should be zero in this case */
- stats->currentPreemptiveRate = 0;
- }
- else
- {
- /* check that denominator fits in signed 16-bit */
- while (tempU32 > WEBRTC_SPL_WORD16_MAX)
- {
- tempU32 >>= 1; /* right-shift 1 step */
- numShift--; /* compensate in numerator */
- }
- tempU16 = (uint16_t) tempU32;
-
- /* do the shift of numerator */
- tempU32
- = WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.preemptiveLength, numShift);
-
- stats->currentPreemptiveRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32,
- tempU16);
- }
- }
- else
- {
- /* lost count is larger than elapsed time count; probably timestamp wrap-around or something else wrong */
- /* set loss rate = 1 */
- stats->currentPreemptiveRate = 1 << 14; /* 1 in Q14 */
- }
-
- stats->clockDriftPPM = WebRtcNetEQ_AverageIAT(
- &NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst);
-
- /* reset counters */
- WebRtcNetEQ_ResetMcuInCallStats(&(NetEqMainInst->MCUinst));
- WebRtcNetEQ_ClearInCallStats(&(NetEqMainInst->DSPinst));
-
- return (0);
-}
-
-int WebRtcNetEQ_GetRawFrameWaitingTimes(void *inst,
- int max_length,
- int* waiting_times_ms) {
- int i = 0;
- MainInst_t *main_inst = (MainInst_t*) inst;
- if (main_inst == NULL) return -1;
-
- while ((i < max_length) && (i < main_inst->MCUinst.len_waiting_times)) {
- waiting_times_ms[i] = main_inst->MCUinst.waiting_times[i] *
- main_inst->DSPinst.millisecondsPerCall;
- ++i;
- }
- assert(i <= kLenWaitingTimes);
- WebRtcNetEQ_ResetWaitingTimeStats(&main_inst->MCUinst);
- return i;
-}
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADInstance(...)
- *
- * Provide a pointer to an allocated VAD instance. If function is never
- * called or it is called with NULL pointer as VAD_inst, the post-decode
- * VAD functionality is disabled. Also provide pointers to init, setmode
- * and VAD functions. These are typically pointers to WebRtcVad_Init,
- * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in the
- * interface file webrtc_vad.h.
- *
- * Input:
- * - NetEQ_inst : NetEQ instance
- * - VADinst : VAD instance
- * - initFunction : Pointer to VAD init function
- * - setmodeFunction : Pointer to VAD setmode function
- * - VADfunction : Pointer to VAD function
- *
- * Output:
- * - NetEQ_inst : Updated NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADInstance(void *NetEQ_inst, void *VAD_inst,
- WebRtcNetEQ_VADInitFunction initFunction,
- WebRtcNetEQ_VADSetmodeFunction setmodeFunction,
- WebRtcNetEQ_VADFunction VADFunction)
-{
-
- /* Typecast to internal instance type */
- MainInst_t *NetEqMainInst = (MainInst_t*) NetEQ_inst;
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
-#ifdef NETEQ_VAD
-
- /* Store pointer in PostDecode VAD struct */
- NetEqMainInst->DSPinst.VADInst.VADState = VAD_inst;
-
- /* Store function pointers */
- NetEqMainInst->DSPinst.VADInst.initFunction = initFunction;
- NetEqMainInst->DSPinst.VADInst.setmodeFunction = setmodeFunction;
- NetEqMainInst->DSPinst.VADInst.VADFunction = VADFunction;
-
- /* Call init function and return the result (ok or fail) */
- return(WebRtcNetEQ_InitVAD(&NetEqMainInst->DSPinst.VADInst, NetEqMainInst->DSPinst.fs));
-
-#else /* NETEQ_VAD not defined */
- return (-1);
-#endif /* NETEQ_VAD */
-
-}
-
-/****************************************************************************
- * WebRtcNetEQ_SetVADMode(...)
- *
- * Pass an aggressiveness mode parameter to the post-decode VAD instance.
- * If this function is never called, mode 0 (quality mode) is used as default.
- *
- * Input:
- * - inst : NetEQ instance
- * - mode : mode parameter (same range as WebRtc VAD mode)
- *
- * Output:
- * - inst : Updated NetEQ instance
- *
- * Return value : 0 - Ok
- * -1 - Error
- */
-
-int WebRtcNetEQ_SetVADMode(void *inst, int mode)
-{
-
- /* Typecast to internal instance type */
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst == NULL)
- {
- return (-1);
- }
-
-#ifdef NETEQ_VAD
-
- /* Set mode and return result */
- return(WebRtcNetEQ_SetVADModeInternal(&NetEqMainInst->DSPinst.VADInst, mode));
-
-#else /* NETEQ_VAD not defined */
- return (-1);
-#endif /* NETEQ_VAD */
-
-}
-
-void WebRtcNetEQ_GetProcessingActivity(void *inst,
- WebRtcNetEQ_ProcessingActivity *stats) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
-
- stats->accelerate_bgn_samples =
- NetEqMainInst->DSPinst.activity_stats.accelerate_bgn_samples;
- stats->accelerate_normal_samples =
- NetEqMainInst->DSPinst.activity_stats.accelarate_normal_samples;
-
- stats->expand_bgn_sampels =
- NetEqMainInst->DSPinst.activity_stats.expand_bgn_samples;
- stats->expand_normal_samples =
- NetEqMainInst->DSPinst.activity_stats.expand_normal_samples;
-
- stats->preemptive_expand_bgn_samples =
- NetEqMainInst->DSPinst.activity_stats.preemptive_expand_bgn_samples;
- stats->preemptive_expand_normal_samples =
- NetEqMainInst->DSPinst.activity_stats.preemptive_expand_normal_samples;
-
- stats->merge_expand_bgn_samples =
- NetEqMainInst->DSPinst.activity_stats.merge_expand_bgn_samples;
- stats->merge_expand_normal_samples =
- NetEqMainInst->DSPinst.activity_stats.merge_expand_normal_samples;
-
- WebRtcNetEQ_ClearActivityStats(&NetEqMainInst->DSPinst);
-}
-
-void WebRtcNetEQ_EnableAVSync(void* inst, int enable) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- NetEqMainInst->MCUinst.av_sync = (enable != 0) ? 1 : 0;
-}
-
-int WebRtcNetEQ_RecInSyncRTP(void* inst, WebRtcNetEQ_RTPInfo* rtp_info,
- uint32_t receive_timestamp) {
- MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- if (NetEqMainInst->MCUinst.av_sync == 0)
- return -1;
- if (WebRtcNetEQ_RecInRTPStruct(inst, rtp_info, kSyncPayload,
- SYNC_PAYLOAD_LEN_BYTES,
- receive_timestamp) < 0) {
- return -1;
- }
- return SYNC_PAYLOAD_LEN_BYTES;
-}
-
-int WebRtcNetEQ_GetRequiredDelayMs(const void* inst) {
- const MainInst_t* NetEqMainInst = (MainInst_t*)inst;
- const AutomodeInst_t* auto_mode = (NetEqMainInst == NULL) ? NULL :
- &NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst;
-
- /* Instance sanity */
- if (NetEqMainInst == NULL || auto_mode == NULL)
- return 0;
-
- if (NetEqMainInst->MCUinst.fs == 0)
- return 0; // Sampling rate not initialized.
-
- /* |required_delay_q8| has the unit of packets in Q8 domain, therefore,
- * the corresponding delay is
- * required_delay_ms = (1000 * required_delay_q8 * samples_per_packet /
- * sample_rate_hz) / 256;
- */
- return (auto_mode->required_delay_q8 *
- ((auto_mode->packetSpeechLenSamp * 1000) / NetEqMainInst->MCUinst.fs) +
- 128) >> 8;
-}
-
-int WebRtcNetEQ_DecodedRtpInfo(const void* inst,
- int* sequence_number,
- uint32_t* timestamp) {
- const MainInst_t *NetEqMainInst = (inst == NULL) ? NULL :
- (const MainInst_t*) inst;
- if (NetEqMainInst->MCUinst.decoded_packet_sequence_number < 0)
- return -1;
- *sequence_number = NetEqMainInst->MCUinst.decoded_packet_sequence_number;
- *timestamp = NetEqMainInst->MCUinst.decoded_packet_timestamp;
- return 0;
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc
deleted file mode 100644
index c37f8990a8b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq/webrtc_neteq_unittest.cc
+++ /dev/null
@@ -1,778 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/*
- * This file includes unit tests for NetEQ.
- */
-
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h"
-
-#include <stdlib.h>
-#include <string.h> // memset
-
-#include <set>
-#include <sstream>
-#include <string>
-#include <vector>
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
-#include "webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h"
-#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h"
-#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h"
-#include "webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
-#include "webrtc/modules/audio_coding/neteq4/tools/input_audio_file.h"
-#include "webrtc/test/testsupport/fileutils.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-class RefFiles {
- public:
- RefFiles(const std::string& input_file, const std::string& output_file);
- ~RefFiles();
- template<class T> void ProcessReference(const T& test_results);
- template<typename T, size_t n> void ProcessReference(
- const T (&test_results)[n],
- size_t length);
- template<typename T, size_t n> void WriteToFile(
- const T (&test_results)[n],
- size_t length);
- template<typename T, size_t n> void ReadFromFileAndCompare(
- const T (&test_results)[n],
- size_t length);
- void WriteToFile(const WebRtcNetEQ_NetworkStatistics& stats);
- void ReadFromFileAndCompare(const WebRtcNetEQ_NetworkStatistics& stats);
- void WriteToFile(const WebRtcNetEQ_RTCPStat& stats);
- void ReadFromFileAndCompare(const WebRtcNetEQ_RTCPStat& stats);
-
- FILE* input_fp_;
- FILE* output_fp_;
-};
-
-RefFiles::RefFiles(const std::string &input_file,
- const std::string &output_file)
- : input_fp_(NULL),
- output_fp_(NULL) {
- if (!input_file.empty()) {
- input_fp_ = fopen(input_file.c_str(), "rb");
- EXPECT_TRUE(input_fp_ != NULL);
- }
- if (!output_file.empty()) {
- output_fp_ = fopen(output_file.c_str(), "wb");
- EXPECT_TRUE(output_fp_ != NULL);
- }
-}
-
-RefFiles::~RefFiles() {
- if (input_fp_) {
- EXPECT_EQ(EOF, fgetc(input_fp_)); // Make sure that we reached the end.
- fclose(input_fp_);
- }
- if (output_fp_) fclose(output_fp_);
-}
-
-template<class T>
-void RefFiles::ProcessReference(const T& test_results) {
- WriteToFile(test_results);
- ReadFromFileAndCompare(test_results);
-}
-
-template<typename T, size_t n>
-void RefFiles::ProcessReference(const T (&test_results)[n], size_t length) {
- WriteToFile(test_results, length);
- ReadFromFileAndCompare(test_results, length);
-}
-
-template<typename T, size_t n>
-void RefFiles::WriteToFile(const T (&test_results)[n], size_t length) {
- if (output_fp_) {
- ASSERT_EQ(length, fwrite(&test_results, sizeof(T), length, output_fp_));
- }
-}
-
-template<typename T, size_t n>
-void RefFiles::ReadFromFileAndCompare(const T (&test_results)[n],
- size_t length) {
- if (input_fp_) {
- // Read from ref file.
- T* ref = new T[length];
- ASSERT_EQ(length, fread(ref, sizeof(T), length, input_fp_));
- // Compare
- ASSERT_EQ(0, memcmp(&test_results, ref, sizeof(T) * length));
- delete [] ref;
- }
-}
-
-void RefFiles::WriteToFile(const WebRtcNetEQ_NetworkStatistics& stats) {
- if (output_fp_) {
- ASSERT_EQ(1u, fwrite(&stats, sizeof(WebRtcNetEQ_NetworkStatistics), 1,
- output_fp_));
- }
-}
-
-void RefFiles::ReadFromFileAndCompare(
- const WebRtcNetEQ_NetworkStatistics& stats) {
- if (input_fp_) {
- // Read from ref file.
- size_t stat_size = sizeof(WebRtcNetEQ_NetworkStatistics);
- WebRtcNetEQ_NetworkStatistics ref_stats;
- ASSERT_EQ(1u, fread(&ref_stats, stat_size, 1, input_fp_));
- // Compare
- EXPECT_EQ(0, memcmp(&stats, &ref_stats, stat_size));
- }
-}
-
-void RefFiles::WriteToFile(const WebRtcNetEQ_RTCPStat& stats) {
- if (output_fp_) {
- ASSERT_EQ(1u, fwrite(&(stats.fraction_lost), sizeof(stats.fraction_lost), 1,
- output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.cum_lost), sizeof(stats.cum_lost), 1,
- output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.ext_max), sizeof(stats.ext_max), 1,
- output_fp_));
- ASSERT_EQ(1u, fwrite(&(stats.jitter), sizeof(stats.jitter), 1,
- output_fp_));
- }
-}
-
-void RefFiles::ReadFromFileAndCompare(
- const WebRtcNetEQ_RTCPStat& stats) {
- if (input_fp_) {
- // Read from ref file.
- WebRtcNetEQ_RTCPStat ref_stats;
- ASSERT_EQ(1u, fread(&(ref_stats.fraction_lost),
- sizeof(ref_stats.fraction_lost), 1, input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.cum_lost), sizeof(ref_stats.cum_lost), 1,
- input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.ext_max), sizeof(ref_stats.ext_max), 1,
- input_fp_));
- ASSERT_EQ(1u, fread(&(ref_stats.jitter), sizeof(ref_stats.jitter), 1,
- input_fp_));
- // Compare
- EXPECT_EQ(ref_stats.fraction_lost, stats.fraction_lost);
- EXPECT_EQ(ref_stats.cum_lost, stats.cum_lost);
- EXPECT_EQ(ref_stats.ext_max, stats.ext_max);
- EXPECT_EQ(ref_stats.jitter, stats.jitter);
- }
-}
-
-class NetEqDecodingTest : public ::testing::Test {
- protected:
- // NetEQ must be polled for data once every 10 ms. Thus, neither of the
- // constants below can be changed.
- static const int kTimeStepMs = 10;
- static const int kBlockSize8kHz = kTimeStepMs * 8;
- static const int kBlockSize16kHz = kTimeStepMs * 16;
- static const int kBlockSize32kHz = kTimeStepMs * 32;
- static const int kMaxBlockSize = kBlockSize32kHz;
-
- NetEqDecodingTest();
- virtual void SetUp();
- virtual void TearDown();
- void SelectDecoders(WebRtcNetEQDecoder* used_codec);
- void LoadDecoders();
- void OpenInputFile(const std::string &rtp_file);
- void Process(NETEQTEST_RTPpacket* rtp_ptr, int16_t* out_len);
- void DecodeAndCompare(const std::string &rtp_file,
- const std::string &ref_file);
- void DecodeAndCheckStats(const std::string &rtp_file,
- const std::string &stat_ref_file,
- const std::string &rtcp_ref_file);
- static void PopulateRtpInfo(int frame_index,
- int timestamp,
- WebRtcNetEQ_RTPInfo* rtp_info);
- static void PopulateCng(int frame_index,
- int timestamp,
- WebRtcNetEQ_RTPInfo* rtp_info,
- uint8_t* payload,
- int* payload_len);
- void WrapTest(uint16_t start_seq_no, uint32_t start_timestamp,
- const std::set<uint16_t>& drop_seq_numbers);
-
- NETEQTEST_NetEQClass* neteq_inst_;
- std::vector<NETEQTEST_Decoder*> dec_;
- FILE* rtp_fp_;
- unsigned int sim_clock_;
- int16_t out_data_[kMaxBlockSize];
-};
-
-NetEqDecodingTest::NetEqDecodingTest()
- : neteq_inst_(NULL),
- rtp_fp_(NULL),
- sim_clock_(0) {
- memset(out_data_, 0, sizeof(out_data_));
-}
-
-void NetEqDecodingTest::SetUp() {
- WebRtcNetEQDecoder usedCodec[kDecoderReservedEnd - 1];
-
- SelectDecoders(usedCodec);
- neteq_inst_ = new NETEQTEST_NetEQClass(usedCodec, dec_.size(), 8000,
- kTCPLargeJitter);
- ASSERT_TRUE(neteq_inst_);
- LoadDecoders();
-}
-
-void NetEqDecodingTest::TearDown() {
- if (neteq_inst_)
- delete neteq_inst_;
- for (size_t i = 0; i < dec_.size(); ++i) {
- if (dec_[i])
- delete dec_[i];
- }
- if (rtp_fp_)
- fclose(rtp_fp_);
-}
-
-void NetEqDecodingTest::SelectDecoders(WebRtcNetEQDecoder* used_codec) {
- *used_codec++ = kDecoderPCMu;
- dec_.push_back(new decoder_PCMU(0));
- *used_codec++ = kDecoderPCMa;
- dec_.push_back(new decoder_PCMA(8));
- *used_codec++ = kDecoderILBC;
- dec_.push_back(new decoder_ILBC(102));
- *used_codec++ = kDecoderISAC;
- dec_.push_back(new decoder_iSAC(103));
- *used_codec++ = kDecoderISACswb;
- dec_.push_back(new decoder_iSACSWB(104));
- *used_codec++ = kDecoderISACfb;
- dec_.push_back(new decoder_iSACFB(105));
- *used_codec++ = kDecoderPCM16B;
- dec_.push_back(new decoder_PCM16B_NB(93));
- *used_codec++ = kDecoderPCM16Bwb;
- dec_.push_back(new decoder_PCM16B_WB(94));
- *used_codec++ = kDecoderPCM16Bswb32kHz;
- dec_.push_back(new decoder_PCM16B_SWB32(95));
- *used_codec++ = kDecoderCNG;
- dec_.push_back(new decoder_CNG(13, 8000));
- *used_codec++ = kDecoderCNG;
- dec_.push_back(new decoder_CNG(98, 16000));
-}
-
-void NetEqDecodingTest::LoadDecoders() {
- for (size_t i = 0; i < dec_.size(); ++i) {
- ASSERT_EQ(0, dec_[i]->loadToNetEQ(*neteq_inst_));
- }
-}
-
-void NetEqDecodingTest::OpenInputFile(const std::string &rtp_file) {
- rtp_fp_ = fopen(rtp_file.c_str(), "rb");
- ASSERT_TRUE(rtp_fp_ != NULL);
- ASSERT_EQ(0, NETEQTEST_RTPpacket::skipFileHeader(rtp_fp_));
-}
-
-void NetEqDecodingTest::Process(NETEQTEST_RTPpacket* rtp, int16_t* out_len) {
- // Check if time to receive.
- while ((sim_clock_ >= rtp->time()) &&
- (rtp->dataLen() >= 0)) {
- if (rtp->dataLen() > 0) {
- ASSERT_EQ(0, neteq_inst_->recIn(*rtp));
- }
- // Get next packet.
- ASSERT_NE(-1, rtp->readFromFile(rtp_fp_));
- }
-
- // RecOut
- *out_len = neteq_inst_->recOut(out_data_);
- ASSERT_TRUE((*out_len == kBlockSize8kHz) ||
- (*out_len == kBlockSize16kHz) ||
- (*out_len == kBlockSize32kHz));
-
- // Increase time.
- sim_clock_ += kTimeStepMs;
-}
-
-void NetEqDecodingTest::DecodeAndCompare(const std::string &rtp_file,
- const std::string &ref_file) {
- OpenInputFile(rtp_file);
-
- std::string ref_out_file = "";
- if (ref_file.empty()) {
- ref_out_file = webrtc::test::OutputPath() + "neteq_out.pcm";
- }
- RefFiles ref_files(ref_file, ref_out_file);
-
- NETEQTEST_RTPpacket rtp;
- ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
- int i = 0;
- while (rtp.dataLen() >= 0) {
- std::ostringstream ss;
- ss << "Lap number " << i++ << " in DecodeAndCompare while loop";
- SCOPED_TRACE(ss.str()); // Print out the parameter values on failure.
- int16_t out_len;
- ASSERT_NO_FATAL_FAILURE(Process(&rtp, &out_len));
- ASSERT_NO_FATAL_FAILURE(ref_files.ProcessReference(out_data_, out_len));
- }
-}
-
-void NetEqDecodingTest::DecodeAndCheckStats(const std::string &rtp_file,
- const std::string &stat_ref_file,
- const std::string &rtcp_ref_file) {
- OpenInputFile(rtp_file);
- std::string stat_out_file = "";
- if (stat_ref_file.empty()) {
- stat_out_file = webrtc::test::OutputPath() +
- "neteq_network_stats.dat";
- }
- RefFiles network_stat_files(stat_ref_file, stat_out_file);
-
- std::string rtcp_out_file = "";
- if (rtcp_ref_file.empty()) {
- rtcp_out_file = webrtc::test::OutputPath() +
- "neteq_rtcp_stats.dat";
- }
- RefFiles rtcp_stat_files(rtcp_ref_file, rtcp_out_file);
-
- NETEQTEST_RTPpacket rtp;
- ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
- while (rtp.dataLen() >= 0) {
- int16_t out_len;
- Process(&rtp, &out_len);
-
- // Query the network statistics API once per second
- if (sim_clock_ % 1000 == 0) {
- // Process NetworkStatistics.
- WebRtcNetEQ_NetworkStatistics network_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
- &network_stats));
- network_stat_files.ProcessReference(network_stats);
-
- // Process RTCPstat.
- WebRtcNetEQ_RTCPStat rtcp_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetRTCPStats(neteq_inst_->instance(),
- &rtcp_stats));
- rtcp_stat_files.ProcessReference(rtcp_stats);
- }
- }
-}
-
-void NetEqDecodingTest::PopulateRtpInfo(int frame_index,
- int timestamp,
- WebRtcNetEQ_RTPInfo* rtp_info) {
- rtp_info->sequenceNumber = frame_index;
- rtp_info->timeStamp = timestamp;
- rtp_info->SSRC = 0x1234; // Just an arbitrary SSRC.
- rtp_info->payloadType = 94; // PCM16b WB codec.
- rtp_info->markerBit = 0;
-}
-
-void NetEqDecodingTest::PopulateCng(int frame_index,
- int timestamp,
- WebRtcNetEQ_RTPInfo* rtp_info,
- uint8_t* payload,
- int* payload_len) {
- rtp_info->sequenceNumber = frame_index;
- rtp_info->timeStamp = timestamp;
- rtp_info->SSRC = 0x1234; // Just an arbitrary SSRC.
- rtp_info->payloadType = 98; // WB CNG.
- rtp_info->markerBit = 0;
- payload[0] = 64; // Noise level -64 dBov, quite arbitrarily chosen.
- *payload_len = 1; // Only noise level, no spectral parameters.
-}
-
-#if (defined(_WIN32) && defined(WEBRTC_ARCH_64_BITS)) || defined(WEBRTC_ANDROID)
-// Disabled for Windows 64-bit until webrtc:1460 is fixed.
-#define MAYBE_TestBitExactness DISABLED_TestBitExactness
-#else
-#define MAYBE_TestBitExactness TestBitExactness
-#endif
-
-TEST_F(NetEqDecodingTest, MAYBE_TestBitExactness) {
- const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq_universal.rtp";
-#if defined(_MSC_VER) && (_MSC_VER >= 1700)
- // For Visual Studio 2012 and later, we will have to use the generic reference
- // file, rather than the windows-specific one.
- const std::string kInputRefFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq_universal_ref.pcm";
-#else
- const std::string kInputRefFile =
- webrtc::test::ResourcePath("audio_coding/neteq_universal_ref", "pcm");
-#endif
- DecodeAndCompare(kInputRtpFile, kInputRefFile);
-}
-
-TEST_F(NetEqDecodingTest, TestNetworkStatistics) {
- const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq_universal.rtp";
-#if defined(_MSC_VER) && (_MSC_VER >= 1700)
- // For Visual Studio 2012 and later, we will have to use the generic reference
- // file, rather than the windows-specific one.
- const std::string kNetworkStatRefFile = webrtc::test::ProjectRootPath() +
- "resources/audio_coding/neteq_network_stats.dat";
-#else
- const std::string kNetworkStatRefFile =
- webrtc::test::ResourcePath("audio_coding/neteq_network_stats", "dat");
-#endif
- const std::string kRtcpStatRefFile =
- webrtc::test::ResourcePath("audio_coding/neteq_rtcp_stats", "dat");
- DecodeAndCheckStats(kInputRtpFile, kNetworkStatRefFile, kRtcpStatRefFile);
-}
-
-TEST_F(NetEqDecodingTest, TestFrameWaitingTimeStatistics) {
- // Use fax mode to avoid time-scaling. This is to simplify the testing of
- // packet waiting times in the packet buffer.
- ASSERT_EQ(0,
- WebRtcNetEQ_SetPlayoutMode(neteq_inst_->instance(), kPlayoutFax));
- // Insert 30 dummy packets at once. Each packet contains 10 ms 16 kHz audio.
- int num_frames = 30;
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- for (int i = 0; i < num_frames; ++i) {
- uint16_t payload[kSamples] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- rtp_info.sequenceNumber = i;
- rtp_info.timeStamp = i * kSamples;
- rtp_info.SSRC = 0x1234; // Just an arbitrary SSRC.
- rtp_info.payloadType = 94; // PCM16b WB codec.
- rtp_info.markerBit = 0;
- ASSERT_EQ(0, WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(), &rtp_info,
- reinterpret_cast<uint8_t*>(payload),
- kPayloadBytes, 0));
- }
- // Pull out all data.
- for (int i = 0; i < num_frames; ++i) {
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
- const int kVecLen = 110; // More than kLenWaitingTimes in mcu.h.
- int waiting_times[kVecLen];
- int len = WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
- kVecLen, waiting_times);
- EXPECT_EQ(num_frames, len);
- // Since all frames are dumped into NetEQ at once, but pulled out with 10 ms
- // spacing (per definition), we expect the delay to increase with 10 ms for
- // each packet.
- for (int i = 0; i < len; ++i) {
- EXPECT_EQ((i + 1) * 10, waiting_times[i]);
- }
-
- // Check statistics again and make sure it's been reset.
- EXPECT_EQ(0, WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
- kVecLen, waiting_times));
-
- // Process > 100 frames, and make sure that that we get statistics
- // only for 100 frames. Note the new SSRC, causing NetEQ to reset.
- num_frames = 110;
- for (int i = 0; i < num_frames; ++i) {
- uint16_t payload[kSamples] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- rtp_info.sequenceNumber = i;
- rtp_info.timeStamp = i * kSamples;
- rtp_info.SSRC = 0x1235; // Just an arbitrary SSRC.
- rtp_info.payloadType = 94; // PCM16b WB codec.
- rtp_info.markerBit = 0;
- ASSERT_EQ(0, WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(), &rtp_info,
- reinterpret_cast<uint8_t*>(payload),
- kPayloadBytes, 0));
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
-
- len = WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
- kVecLen, waiting_times);
- EXPECT_EQ(100, len);
-}
-
-TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) {
- const int kNumFrames = 3000; // Needed for convergence.
- int frame_index = 0;
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- while (frame_index < kNumFrames) {
- // Insert one packet each time, except every 10th time where we insert two
- // packets at once. This will create a negative clock-drift of approx. 10%.
- int num_packets = (frame_index % 10 == 0 ? 2 : 1);
- for (int n = 0; n < num_packets; ++n) {
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- ++frame_index;
- }
-
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
-
- WebRtcNetEQ_NetworkStatistics network_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
- &network_stats));
- EXPECT_EQ(-103196, network_stats.clockDriftPPM);
-}
-
-TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) {
- const int kNumFrames = 5000; // Needed for convergence.
- int frame_index = 0;
- const int kSamples = 10 * 16;
- const int kPayloadBytes = kSamples * 2;
- for (int i = 0; i < kNumFrames; ++i) {
- // Insert one packet each time, except every 10th time where we don't insert
- // any packet. This will create a positive clock-drift of approx. 11%.
- int num_packets = (i % 10 == 9 ? 0 : 1);
- for (int n = 0; n < num_packets; ++n) {
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- ++frame_index;
- }
-
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
-
- WebRtcNetEQ_NetworkStatistics network_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
- &network_stats));
- EXPECT_EQ(110946, network_stats.clockDriftPPM);
-}
-
-TEST_F(NetEqDecodingTest, LongCngWithClockDrift) {
- uint16_t seq_no = 0;
- uint32_t timestamp = 0;
- const int kFrameSizeMs = 30;
- const int kSamples = kFrameSizeMs * 16;
- const int kPayloadBytes = kSamples * 2;
- // Apply a clock drift of -25 ms / s (sender faster than receiver).
- const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
- double next_input_time_ms = 0.0;
- double t_ms;
-
- // Insert speech for 5 seconds.
- const int kSpeechDurationMs = 5000;
- for (t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one 30 ms speech frame.
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- ++seq_no;
- timestamp += kSamples;
- next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
- }
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
-
- EXPECT_EQ(kOutputNormal, neteq_inst_->getOutputType());
- int32_t delay_before = timestamp - neteq_inst_->getSpeechTimeStamp();
-
- // Insert CNG for 1 minute (= 60000 ms).
- const int kCngPeriodMs = 100;
- const int kCngPeriodSamples = kCngPeriodMs * 16; // Period in 16 kHz samples.
- const int kCngDurationMs = 60000;
- for (; t_ms < kSpeechDurationMs + kCngDurationMs; t_ms += 10) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one CNG frame each 100 ms.
- uint8_t payload[kPayloadBytes];
- int payload_len;
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- payload_len, 0));
- ++seq_no;
- timestamp += kCngPeriodSamples;
- next_input_time_ms += static_cast<double>(kCngPeriodMs) * kDriftFactor;
- }
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
-
- EXPECT_EQ(kOutputCNG, neteq_inst_->getOutputType());
-
- // Insert speech again until output type is speech.
- while (neteq_inst_->getOutputType() != kOutputNormal) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one 30 ms speech frame.
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- ++seq_no;
- timestamp += kSamples;
- next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
- }
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- // Increase clock.
- t_ms += 10;
- }
-
- int32_t delay_after = timestamp - neteq_inst_->getSpeechTimeStamp();
- // Compare delay before and after, and make sure it differs less than 20 ms.
- EXPECT_LE(delay_after, delay_before + 20 * 16);
- EXPECT_GE(delay_after, delay_before - 20 * 16);
-}
-
-TEST_F(NetEqDecodingTest, NoInputDataStereo) {
- void *ms_info;
- ms_info = malloc(WebRtcNetEQ_GetMasterSlaveInfoSize());
- neteq_inst_->setMaster();
-
- // Slave instance without decoders (because it is easier).
- WebRtcNetEQDecoder usedCodec[kDecoderReservedEnd - 1];
- usedCodec[0] = kDecoderPCMu;
- NETEQTEST_NetEQClass* slave_inst =
- new NETEQTEST_NetEQClass(usedCodec, 1, 8000, kTCPLargeJitter);
- ASSERT_TRUE(slave_inst);
- NETEQTEST_Decoder* dec = new decoder_PCMU(0);
- ASSERT_TRUE(dec != NULL);
- dec->loadToNetEQ(*slave_inst);
- slave_inst->setSlave();
-
- // Pull out data.
- const int kNumFrames = 100;
- for (int i = 0; i < kNumFrames; ++i) {
- ASSERT_TRUE(kBlockSize8kHz == neteq_inst_->recOut(out_data_, ms_info));
- ASSERT_TRUE(kBlockSize8kHz == slave_inst->recOut(out_data_, ms_info));
- }
-
- delete dec;
- delete slave_inst;
- free(ms_info);
-}
-
-TEST_F(NetEqDecodingTest, TestExtraDelay) {
- static const int kNumFrames = 120000; // Needed for convergence.
- int frame_index = 0;
- static const int kFrameSizeSamples = 30 * 16;
- static const int kPayloadBytes = kFrameSizeSamples * 2;
- test::InputAudioFile input_file(
- webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"));
- int16_t input[kFrameSizeSamples];
- // Buffers of NetEq cannot accommodate larger delays for PCM16.
- static const int kExtraDelayMs = 3200;
- ASSERT_EQ(0, WebRtcNetEQ_SetExtraDelay(neteq_inst_->instance(),
- kExtraDelayMs));
- for (int i = 0; i < kNumFrames; ++i) {
- ASSERT_TRUE(input_file.Read(kFrameSizeSamples, input));
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(frame_index, frame_index * kFrameSizeSamples, &rtp_info);
- uint8_t* payload = reinterpret_cast<uint8_t*>(input);
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- ++frame_index;
- // Pull out data.
- for (int j = 0; j < 3; ++j) {
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- }
- if (i % 100 == 0) {
- WebRtcNetEQ_NetworkStatistics network_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
- &network_stats));
- const int expected_lower_limit =
- std::min(i * 0.083 - 210, 0.9 * network_stats.preferredBufferSize);
- EXPECT_GE(network_stats.currentBufferSize, expected_lower_limit);
- const int expected_upper_limit =
- std::min(i * 0.083 + 255, 1.2 * network_stats.preferredBufferSize);
- EXPECT_LE(network_stats.currentBufferSize, expected_upper_limit);
- }
- }
-}
-
-void NetEqDecodingTest::WrapTest(uint16_t start_seq_no,
- uint32_t start_timestamp,
- const std::set<uint16_t>& drop_seq_numbers) {
- uint16_t seq_no = start_seq_no;
- uint32_t timestamp = start_timestamp;
- const int kFrameSizeMs = 30;
- const int kSamples = kFrameSizeMs * 16;
- const int kPayloadBytes = kSamples * 2;
- double next_input_time_ms = 0.0;
-
- // Insert speech for 1 second.
- const int kSpeechDurationMs = 1000;
- for (double t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) {
- // Each turn in this for loop is 10 ms.
- while (next_input_time_ms <= t_ms) {
- // Insert one 30 ms speech frame.
- uint8_t payload[kPayloadBytes] = {0};
- WebRtcNetEQ_RTPInfo rtp_info;
- PopulateRtpInfo(seq_no, timestamp, &rtp_info);
- if (drop_seq_numbers.find(seq_no) == drop_seq_numbers.end()) {
- // This sequence number was not in the set to drop. Insert it.
- ASSERT_EQ(0,
- WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
- &rtp_info,
- payload,
- kPayloadBytes, 0));
- }
- ++seq_no;
- timestamp += kSamples;
- next_input_time_ms += static_cast<double>(kFrameSizeMs);
- WebRtcNetEQ_NetworkStatistics network_stats;
- ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
- &network_stats));
- // Expect preferred and actual buffer size to be no more than 2 frames.
- EXPECT_LE(network_stats.preferredBufferSize, kFrameSizeMs * 2);
- EXPECT_LE(network_stats.currentBufferSize, kFrameSizeMs * 2);
- }
- // Pull out data once.
- ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
- // Expect delay (in samples) to be less than 2 packets.
- EXPECT_LE(timestamp - neteq_inst_->getSpeechTimeStamp(),
- static_cast<uint32_t>(kSamples * 2));
- }
-}
-
-TEST_F(NetEqDecodingTest, SequenceNumberWrap) {
- // Start with a sequence number that will soon wrap.
- std::set<uint16_t> drop_seq_numbers; // Don't drop any packets.
- WrapTest(0xFFFF - 5, 0, drop_seq_numbers);
-}
-
-TEST_F(NetEqDecodingTest, SequenceNumberWrapAndDrop) {
- // Start with a sequence number that will soon wrap.
- std::set<uint16_t> drop_seq_numbers;
- drop_seq_numbers.insert(0xFFFF);
- drop_seq_numbers.insert(0x0);
- WrapTest(0xFFFF - 5, 0, drop_seq_numbers);
-}
-
-TEST_F(NetEqDecodingTest, TimestampWrap) {
- // Start with a timestamp that will soon wrap.
- std::set<uint16_t> drop_seq_numbers;
- WrapTest(0, 0xFFFFFFFF - 1000, drop_seq_numbers);
-}
-
-TEST_F(NetEqDecodingTest, TimestampAndSequenceNumberWrap) {
- // Start with a timestamp and a sequence number that will wrap at the same
- // time.
- std::set<uint16_t> drop_seq_numbers;
- WrapTest(0xFFFF - 2, 0xFFFFFFFF - 1000, drop_seq_numbers);
-}
-
-} // namespace
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/OWNERS b/chromium/third_party/webrtc/modules/audio_coding/neteq4/OWNERS
deleted file mode 100644
index d54559c4ad8..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-henrik.lundin@webrtc.org
-tina.legrand@webrtc.org
-turaj@webrtc.org
-minyue@webrtc.org
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.h
deleted file mode 100644
index d08b64f4921..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/dtmf_buffer.h
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_BUFFER_H_
-
-#include <list>
-#include <string> // size_t
-
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-struct DtmfEvent {
- uint32_t timestamp;
- int event_no;
- int volume;
- int duration;
- bool end_bit;
-
- // Constructors
- DtmfEvent()
- : timestamp(0),
- event_no(0),
- volume(0),
- duration(0),
- end_bit(false) {
- }
- DtmfEvent(uint32_t ts, int ev, int vol, int dur, bool end)
- : timestamp(ts),
- event_no(ev),
- volume(vol),
- duration(dur),
- end_bit(end) {
- }
-};
-
-// This is the buffer holding DTMF events while waiting for them to be played.
-class DtmfBuffer {
- public:
- enum BufferReturnCodes {
- kOK = 0,
- kInvalidPointer,
- kPayloadTooShort,
- kInvalidEventParameters,
- kInvalidSampleRate
- };
-
- // Set up the buffer for use at sample rate |fs_hz|.
- explicit DtmfBuffer(int fs_hz) {
- SetSampleRate(fs_hz);
- }
-
- virtual ~DtmfBuffer() {}
-
- // Flushes the buffer.
- virtual void Flush() { buffer_.clear(); }
-
- // Static method to parse 4 bytes from |payload| as a DTMF event (RFC 4733)
- // and write the parsed information into the struct |event|. Input variable
- // |rtp_timestamp| is simply copied into the struct.
- static int ParseEvent(uint32_t rtp_timestamp,
- const uint8_t* payload,
- int payload_length_bytes,
- DtmfEvent* event);
-
- // Inserts |event| into the buffer. The method looks for a matching event and
- // merges the two if a match is found.
- virtual int InsertEvent(const DtmfEvent& event);
-
- // Checks if a DTMF event should be played at time |current_timestamp|. If so,
- // the method returns true; otherwise false. The parameters of the event to
- // play will be written to |event|.
- virtual bool GetEvent(uint32_t current_timestamp, DtmfEvent* event);
-
- // Number of events in the buffer.
- virtual size_t Length() const { return buffer_.size(); }
-
- virtual bool Empty() const { return buffer_.empty(); }
-
- // Set a new sample rate.
- virtual int SetSampleRate(int fs_hz);
-
- private:
- typedef std::list<DtmfEvent> DtmfList;
-
- int max_extrapolation_samples_;
- int frame_len_samples_; // TODO(hlundin): Remove this later.
-
- // Compares two events and returns true if they are the same.
- static bool SameEvent(const DtmfEvent& a, const DtmfEvent& b);
-
- // Merges |event| to the event pointed out by |it|. The method checks that
- // the two events are the same (using the SameEvent method), and merges them
- // if that was the case, returning true. If the events are not the same, false
- // is returned.
- bool MergeEvents(DtmfList::iterator it, const DtmfEvent& event);
-
- // Method used by the sort algorithm to rank events in the buffer.
- static bool CompareEvents(const DtmfEvent& a, const DtmfEvent& b);
-
- DtmfList buffer_;
-
- DISALLOW_COPY_AND_ASSIGN(DtmfBuffer);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_DTMF_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.cc
deleted file mode 100644
index 1ec71a2a6fb..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.cc
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-
-#include "webrtc/modules/audio_coding/neteq4/buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/delay_peak_detector.h"
-#include "webrtc/modules/audio_coding/neteq4/dtmf_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/dtmf_tone_generator.h"
-#include "webrtc/modules/audio_coding/neteq4/neteq_impl.h"
-#include "webrtc/modules/audio_coding/neteq4/packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/payload_splitter.h"
-#include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h"
-
-namespace webrtc {
-
-// Creates all classes needed and inject them into a new NetEqImpl object.
-// Return the new object.
-NetEq* NetEq::Create(int sample_rate_hz) {
- BufferLevelFilter* buffer_level_filter = new BufferLevelFilter;
- DecoderDatabase* decoder_database = new DecoderDatabase;
- DelayPeakDetector* delay_peak_detector = new DelayPeakDetector;
- DelayManager* delay_manager = new DelayManager(kMaxNumPacketsInBuffer,
- delay_peak_detector);
- DtmfBuffer* dtmf_buffer = new DtmfBuffer(sample_rate_hz);
- DtmfToneGenerator* dtmf_tone_generator = new DtmfToneGenerator;
- PacketBuffer* packet_buffer = new PacketBuffer(kMaxNumPacketsInBuffer,
- kMaxBytesInBuffer);
- PayloadSplitter* payload_splitter = new PayloadSplitter;
- TimestampScaler* timestamp_scaler = new TimestampScaler(*decoder_database);
- return new NetEqImpl(sample_rate_hz,
- buffer_level_filter,
- decoder_database,
- delay_manager,
- delay_peak_detector,
- dtmf_buffer,
- dtmf_tone_generator,
- packet_buffer,
- payload_splitter,
- timestamp_scaler);
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.gypi b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.gypi
deleted file mode 100644
index 41fdb31ea6b..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq.gypi
+++ /dev/null
@@ -1,220 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
- 'variables': {
- 'neteq_dependencies': [
- 'G711',
- 'G722',
- 'PCM16B',
- 'iLBC',
- 'iSAC',
- 'iSACFix',
- 'CNG',
- '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
- '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
- ],
- 'neteq_defines': [],
- 'conditions': [
- ['include_opus==1', {
- 'neteq_dependencies': ['webrtc_opus',],
- 'neteq_defines': ['WEBRTC_CODEC_OPUS',],
- }],
- ],
- },
- 'targets': [
- {
- 'target_name': 'NetEq4',
- 'type': 'static_library',
- 'dependencies': [
- '<@(neteq_dependencies)',
- ],
- 'defines': [
- '<@(neteq_defines)',
- ],
- 'include_dirs': [
- 'interface',
- '<(webrtc_root)',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'interface',
- '<(webrtc_root)',
- ],
- },
- 'sources': [
- 'interface/audio_decoder.h',
- 'interface/neteq.h',
- 'accelerate.cc',
- 'accelerate.h',
- 'audio_decoder_impl.cc',
- 'audio_decoder_impl.h',
- 'audio_decoder.cc',
- 'audio_multi_vector.cc',
- 'audio_multi_vector.h',
- 'audio_vector.cc',
- 'audio_vector.h',
- 'background_noise.cc',
- 'background_noise.h',
- 'buffer_level_filter.cc',
- 'buffer_level_filter.h',
- 'comfort_noise.cc',
- 'comfort_noise.h',
- 'decision_logic.cc',
- 'decision_logic.h',
- 'decision_logic_fax.cc',
- 'decision_logic_fax.h',
- 'decision_logic_normal.cc',
- 'decision_logic_normal.h',
- 'decoder_database.cc',
- 'decoder_database.h',
- 'defines.h',
- 'delay_manager.cc',
- 'delay_manager.h',
- 'delay_peak_detector.cc',
- 'delay_peak_detector.h',
- 'dsp_helper.cc',
- 'dsp_helper.h',
- 'dtmf_buffer.cc',
- 'dtmf_buffer.h',
- 'dtmf_tone_generator.cc',
- 'dtmf_tone_generator.h',
- 'expand.cc',
- 'expand.h',
- 'merge.cc',
- 'merge.h',
- 'neteq_impl.cc',
- 'neteq_impl.h',
- 'neteq.cc',
- 'statistics_calculator.cc',
- 'statistics_calculator.h',
- 'normal.cc',
- 'normal.h',
- 'packet_buffer.cc',
- 'packet_buffer.h',
- 'payload_splitter.cc',
- 'payload_splitter.h',
- 'post_decode_vad.cc',
- 'post_decode_vad.h',
- 'preemptive_expand.cc',
- 'preemptive_expand.h',
- 'random_vector.cc',
- 'random_vector.h',
- 'rtcp.cc',
- 'rtcp.h',
- 'sync_buffer.cc',
- 'sync_buffer.h',
- 'timestamp_scaler.cc',
- 'timestamp_scaler.h',
- 'time_stretch.cc',
- 'time_stretch.h',
- ],
- },
- ], # targets
- 'conditions': [
- ['include_tests==1', {
- 'includes': ['neteq_tests.gypi',],
- 'targets': [
- {
- 'target_name': 'audio_decoder_unittests',
- 'type': '<(gtest_target_type)',
- 'dependencies': [
- '<@(neteq_dependencies)',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
- '<(webrtc_root)/test/test.gyp:test_support_main',
- ],
- 'defines': [
- 'AUDIO_DECODER_UNITTEST',
- 'WEBRTC_CODEC_G722',
- 'WEBRTC_CODEC_ILBC',
- 'WEBRTC_CODEC_ISACFX',
- 'WEBRTC_CODEC_ISAC',
- 'WEBRTC_CODEC_PCM16',
- '<@(neteq_defines)',
- ],
- 'sources': [
- 'audio_decoder_impl.cc',
- 'audio_decoder_impl.h',
- 'audio_decoder_unittest.cc',
- 'audio_decoder.cc',
- 'interface/audio_decoder.h',
- ],
- 'conditions': [
- # TODO(henrike): remove build_with_chromium==1 when the bots are
- # using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
- 'dependencies': [
- '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
- ],
- }],
- ],
- }, # audio_decoder_unittests
-
- {
- 'target_name': 'neteq_unittest_tools',
- 'type': 'static_library',
- 'dependencies': [
- '<(DEPTH)/testing/gmock.gyp:gmock',
- '<(DEPTH)/testing/gtest.gyp:gtest',
- '<(webrtc_root)/test/test.gyp:test_support_main',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- 'tools',
- ],
- },
- 'include_dirs': [
- 'tools',
- ],
- 'sources': [
- 'tools/audio_loop.cc',
- 'tools/audio_loop.h',
- 'tools/input_audio_file.cc',
- 'tools/input_audio_file.h',
- 'tools/rtp_generator.cc',
- 'tools/rtp_generator.h',
- ],
- }, # neteq_unittest_tools
- ], # targets
- 'conditions': [
- # TODO(henrike): remove build_with_chromium==1 when the bots are using
- # Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
- 'targets': [
- {
- 'target_name': 'audio_decoder_unittests_apk_target',
- 'type': 'none',
- 'dependencies': [
- '<(apk_tests_path):audio_decoder_unittests_apk',
- ],
- },
- ],
- }],
- ['test_isolation_mode != "noop"', {
- 'targets': [
- {
- 'target_name': 'audio_decoder_unittests_run',
- 'type': 'none',
- 'dependencies': [
- 'audio_decoder_unittests',
- ],
- 'includes': [
- '../../../build/isolate.gypi',
- 'audio_decoder_unittests.isolate',
- ],
- 'sources': [
- 'audio_decoder_unittests.isolate',
- ],
- },
- ],
- }],
- ],
- }], # include_tests
- ], # conditions
-}
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc
deleted file mode 100644
index 7a82053918c..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/neteq_impl_unittest.cc
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/modules/audio_coding/neteq4/neteq_impl.h"
-
-#include "gmock/gmock.h"
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_audio_decoder.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_buffer_level_filter.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_decoder_database.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_delay_manager.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_delay_peak_detector.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_dtmf_tone_generator.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_packet_buffer.h"
-#include "webrtc/modules/audio_coding/neteq4/mock/mock_payload_splitter.h"
-#include "webrtc/modules/audio_coding/neteq4/timestamp_scaler.h"
-
-using ::testing::Return;
-using ::testing::ReturnNull;
-using ::testing::_;
-using ::testing::SetArgPointee;
-using ::testing::InSequence;
-using ::testing::Invoke;
-using ::testing::WithArg;
-
-namespace webrtc {
-
-// This function is called when inserting a packet list into the mock packet
-// buffer. The purpose is to delete all inserted packets properly, to avoid
-// memory leaks in the test.
-int DeletePacketsAndReturnOk(PacketList* packet_list) {
- PacketBuffer::DeleteAllPackets(packet_list);
- return PacketBuffer::kOK;
-}
-
-class NetEqImplTest : public ::testing::Test {
- protected:
- static const int kInitSampleRateHz = 8000;
- NetEqImplTest() {
- buffer_level_filter_ = new MockBufferLevelFilter;
- decoder_database_ = new MockDecoderDatabase;
- delay_peak_detector_ = new MockDelayPeakDetector;
- EXPECT_CALL(*delay_peak_detector_, Reset()).Times(1);
- delay_manager_ = new MockDelayManager(NetEq::kMaxNumPacketsInBuffer,
- delay_peak_detector_);
- dtmf_buffer_ = new MockDtmfBuffer(kInitSampleRateHz);
- dtmf_tone_generator_ = new MockDtmfToneGenerator;
- packet_buffer_ = new MockPacketBuffer(NetEq::kMaxNumPacketsInBuffer,
- NetEq::kMaxBytesInBuffer);
- payload_splitter_ = new MockPayloadSplitter;
- timestamp_scaler_ = new TimestampScaler(*decoder_database_);
- EXPECT_CALL(*decoder_database_, GetActiveCngDecoder())
- .WillOnce(ReturnNull());
- neteq_ = new NetEqImpl(kInitSampleRateHz,
- buffer_level_filter_,
- decoder_database_,
- delay_manager_,
- delay_peak_detector_,
- dtmf_buffer_,
- dtmf_tone_generator_,
- packet_buffer_,
- payload_splitter_,
- timestamp_scaler_);
- }
-
- virtual ~NetEqImplTest() {
- EXPECT_CALL(*buffer_level_filter_, Die()).Times(1);
- EXPECT_CALL(*decoder_database_, Die()).Times(1);
- EXPECT_CALL(*delay_manager_, Die()).Times(1);
- EXPECT_CALL(*delay_peak_detector_, Die()).Times(1);
- EXPECT_CALL(*dtmf_buffer_, Die()).Times(1);
- EXPECT_CALL(*dtmf_tone_generator_, Die()).Times(1);
- EXPECT_CALL(*packet_buffer_, Die()).Times(1);
- delete neteq_;
- }
-
- NetEqImpl* neteq_;
- MockBufferLevelFilter* buffer_level_filter_;
- MockDecoderDatabase* decoder_database_;
- MockDelayPeakDetector* delay_peak_detector_;
- MockDelayManager* delay_manager_;
- MockDtmfBuffer* dtmf_buffer_;
- MockDtmfToneGenerator* dtmf_tone_generator_;
- MockPacketBuffer* packet_buffer_;
- MockPayloadSplitter* payload_splitter_;
- TimestampScaler* timestamp_scaler_;
-};
-
-
-// This tests the interface class NetEq.
-// TODO(hlundin): Move to separate file?
-TEST(NetEq, CreateAndDestroy) {
- NetEq* neteq = NetEq::Create(8000);
- delete neteq;
-}
-
-TEST_F(NetEqImplTest, RegisterPayloadType) {
- uint8_t rtp_payload_type = 0;
- NetEqDecoder codec_type = kDecoderPCMu;
- EXPECT_CALL(*decoder_database_,
- RegisterPayload(rtp_payload_type, codec_type));
- neteq_->RegisterPayloadType(codec_type, rtp_payload_type);
-}
-
-TEST_F(NetEqImplTest, RemovePayloadType) {
- uint8_t rtp_payload_type = 0;
- EXPECT_CALL(*decoder_database_,
- Remove(rtp_payload_type))
- .WillOnce(Return(DecoderDatabase::kDecoderNotFound));
- // Check that kFail is returned when database returns kDecoderNotFound.
- EXPECT_EQ(NetEq::kFail, neteq_->RemovePayloadType(rtp_payload_type));
-}
-
-TEST_F(NetEqImplTest, InsertPacket) {
- const int kPayloadLength = 100;
- const uint8_t kPayloadType = 0;
- const uint16_t kFirstSequenceNumber = 0x1234;
- const uint32_t kFirstTimestamp = 0x12345678;
- const uint32_t kSsrc = 0x87654321;
- const uint32_t kFirstReceiveTime = 17;
- uint8_t payload[kPayloadLength] = {0};
- WebRtcRTPHeader rtp_header;
- rtp_header.header.payloadType = kPayloadType;
- rtp_header.header.sequenceNumber = kFirstSequenceNumber;
- rtp_header.header.timestamp = kFirstTimestamp;
- rtp_header.header.ssrc = kSsrc;
-
- // Create a mock decoder object.
- MockAudioDecoder mock_decoder;
- // BWE update function called with first packet.
- EXPECT_CALL(mock_decoder, IncomingPacket(_,
- kPayloadLength,
- kFirstSequenceNumber,
- kFirstTimestamp,
- kFirstReceiveTime));
- // BWE update function called with second packet.
- EXPECT_CALL(mock_decoder, IncomingPacket(_,
- kPayloadLength,
- kFirstSequenceNumber + 1,
- kFirstTimestamp + 160,
- kFirstReceiveTime + 155));
- EXPECT_CALL(mock_decoder, Die()).Times(1); // Called when deleted.
-
- // Expectations for decoder database.
- EXPECT_CALL(*decoder_database_, IsRed(kPayloadType))
- .WillRepeatedly(Return(false)); // This is not RED.
- EXPECT_CALL(*decoder_database_, CheckPayloadTypes(_))
- .Times(2)
- .WillRepeatedly(Return(DecoderDatabase::kOK)); // Payload type is valid.
- EXPECT_CALL(*decoder_database_, IsDtmf(kPayloadType))
- .WillRepeatedly(Return(false)); // This is not DTMF.
- EXPECT_CALL(*decoder_database_, GetDecoder(kPayloadType))
- .Times(3)
- .WillRepeatedly(Return(&mock_decoder));
- EXPECT_CALL(*decoder_database_, IsComfortNoise(kPayloadType))
- .WillRepeatedly(Return(false)); // This is not CNG.
- DecoderDatabase::DecoderInfo info;
- info.codec_type = kDecoderPCMu;
- EXPECT_CALL(*decoder_database_, GetDecoderInfo(kPayloadType))
- .WillRepeatedly(Return(&info));
-
- // Expectations for packet buffer.
- EXPECT_CALL(*packet_buffer_, NumPacketsInBuffer())
- .WillOnce(Return(0)) // First packet.
- .WillOnce(Return(1)) // Second packet.
- .WillOnce(Return(2)); // Second packet, checking after it was inserted.
- EXPECT_CALL(*packet_buffer_, Flush())
- .Times(1);
- EXPECT_CALL(*packet_buffer_, InsertPacketList(_, _, _, _))
- .Times(2)
- .WillRepeatedly(DoAll(SetArgPointee<2>(kPayloadType),
- WithArg<0>(Invoke(DeletePacketsAndReturnOk))));
- // SetArgPointee<2>(kPayloadType) means that the third argument (zero-based
- // index) is a pointer, and the variable pointed to is set to kPayloadType.
- // Also invoke the function DeletePacketsAndReturnOk to properly delete all
- // packets in the list (to avoid memory leaks in the test).
- EXPECT_CALL(*packet_buffer_, NextRtpHeader())
- .Times(1)
- .WillOnce(Return(&rtp_header.header));
-
- // Expectations for DTMF buffer.
- EXPECT_CALL(*dtmf_buffer_, Flush())
- .Times(1);
-
- // Expectations for delay manager.
- {
- // All expectations within this block must be called in this specific order.
- InSequence sequence; // Dummy variable.
- // Expectations when the first packet is inserted.
- EXPECT_CALL(*delay_manager_, LastDecoderType(kDecoderPCMu))
- .Times(1);
- EXPECT_CALL(*delay_manager_, last_pack_cng_or_dtmf())
- .Times(2)
- .WillRepeatedly(Return(-1));
- EXPECT_CALL(*delay_manager_, set_last_pack_cng_or_dtmf(0))
- .Times(1);
- EXPECT_CALL(*delay_manager_, ResetPacketIatCount()).Times(1);
- // Expectations when the second packet is inserted. Slightly different.
- EXPECT_CALL(*delay_manager_, LastDecoderType(kDecoderPCMu))
- .Times(1);
- EXPECT_CALL(*delay_manager_, last_pack_cng_or_dtmf())
- .WillOnce(Return(0));
- }
-
- // Expectations for payload splitter.
- EXPECT_CALL(*payload_splitter_, SplitAudio(_, _))
- .Times(2)
- .WillRepeatedly(Return(PayloadSplitter::kOK));
-
- // Insert first packet.
- neteq_->InsertPacket(rtp_header, payload, kPayloadLength, kFirstReceiveTime);
-
- // Insert second packet.
- rtp_header.header.timestamp += 160;
- rtp_header.header.sequenceNumber += 1;
- neteq_->InsertPacket(rtp_header, payload, kPayloadLength,
- kFirstReceiveTime + 155);
-}
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.h
deleted file mode 100644
index e964c28f207..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/packet_buffer.h
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_BUFFER_H_
-
-#include "webrtc/modules/audio_coding/neteq4/packet.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declaration.
-class DecoderDatabase;
-
-// This is the actual buffer holding the packets before decoding.
-class PacketBuffer {
- public:
- enum BufferReturnCodes {
- kOK = 0,
- kFlushed,
- kNotFound,
- kBufferEmpty,
- kInvalidPacket,
- kInvalidPointer,
- kOversizePacket
- };
-
- // Constructor creates a buffer which can hold a maximum of
- // |max_number_of_packets| packets and |max_payload_memory| bytes of payload,
- // excluding RTP headers.
- PacketBuffer(size_t max_number_of_packets, size_t max_payload_memory);
-
- // Deletes all packets in the buffer before destroying the buffer.
- virtual ~PacketBuffer();
-
- // Flushes the buffer and deletes all packets in it.
- virtual void Flush();
-
- // Returns true for an empty buffer.
- virtual bool Empty() const { return buffer_.empty(); }
-
- // Inserts |packet| into the buffer. The buffer will take over ownership of
- // the packet object.
- // Returns PacketBuffer::kOK on success, PacketBuffer::kFlushed if the buffer
- // was flushed due to overfilling.
- virtual int InsertPacket(Packet* packet);
-
- // Inserts a list of packets into the buffer. The buffer will take over
- // ownership of the packet objects.
- // Returns PacketBuffer::kOK if all packets were inserted successfully.
- // If the buffer was flushed due to overfilling, only a subset of the list is
- // inserted, and PacketBuffer::kFlushed is returned.
- // The last three parameters are included for legacy compatibility.
- // TODO(hlundin): Redesign to not use current_*_payload_type and
- // decoder_database.
- virtual int InsertPacketList(PacketList* packet_list,
- const DecoderDatabase& decoder_database,
- uint8_t* current_rtp_payload_type,
- uint8_t* current_cng_rtp_payload_type);
-
- // Gets the timestamp for the first packet in the buffer and writes it to the
- // output variable |next_timestamp|.
- // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
- // PacketBuffer::kOK otherwise.
- virtual int NextTimestamp(uint32_t* next_timestamp) const;
-
- // Gets the timestamp for the first packet in the buffer with a timestamp no
- // lower than the input limit |timestamp|. The result is written to the output
- // variable |next_timestamp|.
- // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
- // PacketBuffer::kOK otherwise.
- virtual int NextHigherTimestamp(uint32_t timestamp,
- uint32_t* next_timestamp) const;
-
- // Returns a (constant) pointer the RTP header of the first packet in the
- // buffer. Returns NULL if the buffer is empty.
- virtual const RTPHeader* NextRtpHeader() const;
-
- // Extracts the first packet in the buffer and returns a pointer to it.
- // Returns NULL if the buffer is empty. The caller is responsible for deleting
- // the packet.
- // Subsequent packets with the same timestamp as the one extracted will be
- // discarded and properly deleted. The number of discarded packets will be
- // written to the output variable |discard_count|.
- virtual Packet* GetNextPacket(int* discard_count);
-
- // Discards the first packet in the buffer. The packet is deleted.
- // Returns PacketBuffer::kBufferEmpty if the buffer is empty,
- // PacketBuffer::kOK otherwise.
- virtual int DiscardNextPacket();
-
- // Discards all packets that are (strictly) older than |timestamp_limit|.
- // Returns number of packets discarded.
- virtual int DiscardOldPackets(uint32_t timestamp_limit);
-
- // Returns the number of packets in the buffer, including duplicates and
- // redundant packets.
- virtual int NumPacketsInBuffer() const {
- return static_cast<int>(buffer_.size());
- }
-
- // Returns the number of samples in the buffer, including samples carried in
- // duplicate and redundant packets.
- virtual int NumSamplesInBuffer(DecoderDatabase* decoder_database,
- int last_decoded_length) const;
-
- // Increase the waiting time counter for every packet in the buffer by |inc|.
- // The default value for |inc| is 1.
- virtual void IncrementWaitingTimes(int inc = 1);
-
- virtual void BufferStat(int* num_packets,
- int* max_num_packets,
- int* current_memory_bytes,
- int* max_memory_bytes) const;
-
- virtual int current_memory_bytes() const { return current_memory_bytes_; }
-
- // Static method that properly deletes the first packet, and its payload
- // array, in |packet_list|. Returns false if |packet_list| already was empty,
- // otherwise true.
- static bool DeleteFirstPacket(PacketList* packet_list);
-
- // Static method that properly deletes all packets, and their payload arrays,
- // in |packet_list|.
- static void DeleteAllPackets(PacketList* packet_list);
-
- private:
- size_t max_number_of_packets_;
- size_t max_memory_bytes_;
- int current_memory_bytes_;
- PacketList buffer_;
- DISALLOW_COPY_AND_ASSIGN(PacketBuffer);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_PACKET_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.h b/chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.h
deleted file mode 100644
index 00cbbd15836..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/rtcp.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RTCP_H_
-#define WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RTCP_H_
-
-#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-// Forward declaration.
-struct RTPHeader;
-
-class Rtcp {
- public:
- Rtcp() {
- Init(0);
- }
-
- ~Rtcp() {}
-
- // Resets the RTCP statistics, and sets the first received sequence number.
- void Init(uint16_t start_sequence_number);
-
- // Updates the RTCP statistics with a new received packet.
- void Update(const RTPHeader& rtp_header, uint32_t receive_timestamp);
-
- // Returns the current RTCP statistics. If |no_reset| is true, the statistics
- // are not reset, otherwise they are.
- void GetStatistics(bool no_reset, RtcpStatistics* stats);
-
- private:
- uint16_t cycles_; // The number of wrap-arounds for the sequence number.
- uint16_t max_seq_no_; // The maximum sequence number received. Starts over
- // from 0 after wrap-around.
- uint16_t base_seq_no_; // The sequence number of the first received packet.
- uint32_t received_packets_; // The number of packets that have been received.
- uint32_t received_packets_prior_; // Number of packets received when last
- // report was generated.
- uint32_t expected_prior_; // Expected number of packets, at the time of the
- // last report.
- uint32_t jitter_; // Current jitter value.
- int32_t transit_; // Clock difference for previous packet.
-
- DISALLOW_COPY_AND_ASSIGN(Rtcp);
-};
-
-} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_CODING_NETEQ4_RTCP_H_
diff --git a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc b/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc
deleted file mode 100644
index cf8131f3a06..00000000000
--- a/chromium/third_party/webrtc/modules/audio_coding/neteq4/time_stretch_unittest.cc
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-// Unit tests for Accelerate and PreemptiveExpand classes.
-
-#include "webrtc/modules/audio_coding/neteq4/accelerate.h"
-#include "webrtc/modules/audio_coding/neteq4/preemptive_expand.h"
-
-#include "gtest/gtest.h"
-#include "webrtc/modules/audio_coding/neteq4/background_noise.h"
-
-namespace webrtc {
-
-TEST(TimeStretch, CreateAndDestroy) {
- int sample_rate = 8000;
- size_t num_channels = 1;
- BackgroundNoise bgn(num_channels);
- Accelerate accelerate(sample_rate, num_channels, bgn);
- PreemptiveExpand preemptive_expand(sample_rate, num_channels, bgn);
-}
-
-// TODO(hlundin): Write more tests.
-
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h
index 352537d6ef7..2969ecebe66 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/interface/audio_conference_mixer.h
@@ -57,7 +57,7 @@ public:
// Add/remove participants as candidates for mixing.
virtual int32_t SetMixabilityStatus(MixerParticipant& participant,
- const bool mixable) = 0;
+ bool mixable) = 0;
// mixable is set to true if a participant is a candidate for mixing.
virtual int32_t MixabilityStatus(MixerParticipant& participant,
bool& mixable) = 0;
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/OWNERS b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
index c5cf137f53c..26ef3e881f2 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
@@ -19,6 +19,13 @@
namespace webrtc {
namespace {
+struct ParticipantFramePair {
+ MixerParticipant* participant;
+ AudioFrame* audioFrame;
+};
+
+typedef std::list<ParticipantFramePair*> ParticipantFramePairList;
+
// Mix |frame| into |mixed_frame|, with saturation protection and upmixing.
// These effects are applied to |frame| itself prior to mixing. Assumes that
// |mixed_frame| always has at least as many channels as |frame|. Supports
@@ -40,20 +47,18 @@ void MixFrames(AudioFrame* mixed_frame, AudioFrame* frame) {
}
// Return the max number of channels from a |list| composed of AudioFrames.
-int MaxNumChannels(const ListWrapper& list) {
- ListItem* item = list.First();
+int MaxNumChannels(const AudioFrameList* list) {
int max_num_channels = 1;
- while (item) {
- AudioFrame* frame = static_cast<AudioFrame*>(item->GetItem());
- max_num_channels = std::max(max_num_channels, frame->num_channels_);
- item = list.Next(item);
+ for (AudioFrameList::const_iterator iter = list->begin();
+ iter != list->end();
+ ++iter) {
+ max_num_channels = std::max(max_num_channels, (*iter)->num_channels_);
}
return max_num_channels;
}
void SetParticipantStatistics(ParticipantStatistics* stats,
- const AudioFrame& frame)
-{
+ const AudioFrame& frame) {
stats->participant = frame.id_;
stats->level = 0; // TODO(andrew): to what should this be set?
}
@@ -61,58 +66,47 @@ void SetParticipantStatistics(ParticipantStatistics* stats,
} // namespace
MixerParticipant::MixerParticipant()
- : _mixHistory(new MixHistory())
-{
+ : _mixHistory(new MixHistory()) {
}
-MixerParticipant::~MixerParticipant()
-{
+MixerParticipant::~MixerParticipant() {
delete _mixHistory;
}
-int32_t MixerParticipant::IsMixed(bool& mixed) const
-{
+int32_t MixerParticipant::IsMixed(bool& mixed) const {
return _mixHistory->IsMixed(mixed);
}
MixHistory::MixHistory()
- : _isMixed(0)
-{
+ : _isMixed(0) {
}
-MixHistory::~MixHistory()
-{
+MixHistory::~MixHistory() {
}
-int32_t MixHistory::IsMixed(bool& mixed) const
-{
+int32_t MixHistory::IsMixed(bool& mixed) const {
mixed = _isMixed;
return 0;
}
-int32_t MixHistory::WasMixed(bool& wasMixed) const
-{
+int32_t MixHistory::WasMixed(bool& wasMixed) const {
// Was mixed is the same as is mixed depending on perspective. This function
// is for the perspective of AudioConferenceMixerImpl.
return IsMixed(wasMixed);
}
-int32_t MixHistory::SetIsMixed(const bool mixed)
-{
+int32_t MixHistory::SetIsMixed(const bool mixed) {
_isMixed = mixed;
return 0;
}
-void MixHistory::ResetMixedStatus()
-{
+void MixHistory::ResetMixedStatus() {
_isMixed = false;
}
-AudioConferenceMixer* AudioConferenceMixer::Create(int id)
-{
+AudioConferenceMixer* AudioConferenceMixer::Create(int id) {
AudioConferenceMixerImpl* mixer = new AudioConferenceMixerImpl(id);
- if(!mixer->Init())
- {
+ if(!mixer->Init()) {
delete mixer;
return NULL;
}
@@ -140,11 +134,9 @@ AudioConferenceMixerImpl::AudioConferenceMixerImpl(int id)
_timeStamp(0),
_timeScheduler(kProcessPeriodicityInMs),
_mixedAudioLevel(),
- _processCalls(0)
-{}
+ _processCalls(0) {}
-bool AudioConferenceMixerImpl::Init()
-{
+bool AudioConferenceMixerImpl::Init() {
_crit.reset(CriticalSectionWrapper::CreateCriticalSection());
if (_crit.get() == NULL)
return false;
@@ -153,8 +145,10 @@ bool AudioConferenceMixerImpl::Init()
if(_cbCrit.get() == NULL)
return false;
- _limiter.reset(AudioProcessing::Create(_id));
- if(_limiter.get() == NULL)
+ Config config;
+ config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
+ _limiter.reset(AudioProcessing::Create(config));
+ if(!_limiter.get())
return false;
MemoryPool<AudioFrame>::CreateMemoryPool(_audioFramePool,
@@ -165,10 +159,6 @@ bool AudioConferenceMixerImpl::Init()
if(SetOutputFrequency(kDefaultFrequency) == -1)
return false;
- // Assume mono.
- if (!SetNumLimiterChannels(1))
- return false;
-
if(_limiter->gain_control()->set_mode(GainControl::kFixedDigital) !=
_limiter->kNoError)
return false;
@@ -192,25 +182,21 @@ bool AudioConferenceMixerImpl::Init()
return true;
}
-AudioConferenceMixerImpl::~AudioConferenceMixerImpl()
-{
+AudioConferenceMixerImpl::~AudioConferenceMixerImpl() {
MemoryPool<AudioFrame>::DeleteMemoryPool(_audioFramePool);
assert(_audioFramePool == NULL);
}
-int32_t AudioConferenceMixerImpl::ChangeUniqueId(const int32_t id)
-{
+int32_t AudioConferenceMixerImpl::ChangeUniqueId(const int32_t id) {
_id = id;
return 0;
}
// Process should be called every kProcessPeriodicityInMs ms
-int32_t AudioConferenceMixerImpl::TimeUntilNextProcess()
-{
+int32_t AudioConferenceMixerImpl::TimeUntilNextProcess() {
int32_t timeUntilNextProcess = 0;
CriticalSectionScoped cs(_crit.get());
- if(_timeScheduler.TimeToNextUpdate(timeUntilNextProcess) != 0)
- {
+ if(_timeScheduler.TimeToNextUpdate(timeUntilNextProcess) != 0) {
WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
"failed in TimeToNextUpdate() call");
// Sanity check
@@ -220,9 +206,8 @@ int32_t AudioConferenceMixerImpl::TimeUntilNextProcess()
return timeUntilNextProcess;
}
-int32_t AudioConferenceMixerImpl::Process()
-{
- uint32_t remainingParticipantsAllowedToMix =
+int32_t AudioConferenceMixerImpl::Process() {
+ size_t remainingParticipantsAllowedToMix =
kMaximumAmountOfMixedParticipants;
{
CriticalSectionScoped cs(_crit.get());
@@ -233,9 +218,9 @@ int32_t AudioConferenceMixerImpl::Process()
_timeScheduler.UpdateScheduler();
}
- ListWrapper mixList;
- ListWrapper rampOutList;
- ListWrapper additionalFramesList;
+ AudioFrameList mixList;
+ AudioFrameList rampOutList;
+ AudioFrameList additionalFramesList;
std::map<int, MixerParticipant*> mixedParticipantsMap;
{
CriticalSectionScoped cs(_cbCrit.get());
@@ -246,41 +231,34 @@ int32_t AudioConferenceMixerImpl::Process()
// information.
// TODO(henrike): this is probably more appropriate to do in
// GetLowestMixingFrequency().
- if (lowFreq == 12000)
- {
+ if (lowFreq == 12000) {
lowFreq = 16000;
} else if (lowFreq == 24000) {
lowFreq = 32000;
}
- if(lowFreq <= 0)
- {
+ if(lowFreq <= 0) {
CriticalSectionScoped cs(_crit.get());
_processCalls--;
return 0;
- } else {
- switch(lowFreq)
- {
+ } else {
+ switch(lowFreq) {
case 8000:
- if(OutputFrequency() != kNbInHz)
- {
+ if(OutputFrequency() != kNbInHz) {
SetOutputFrequency(kNbInHz);
}
break;
case 16000:
- if(OutputFrequency() != kWbInHz)
- {
+ if(OutputFrequency() != kWbInHz) {
SetOutputFrequency(kWbInHz);
}
break;
case 32000:
- if(OutputFrequency() != kSwbInHz)
- {
+ if(OutputFrequency() != kSwbInHz) {
SetOutputFrequency(kSwbInHz);
}
break;
case 48000:
- if(OutputFrequency() != kFbInHz)
- {
+ if(OutputFrequency() != kFbInHz) {
SetOutputFrequency(kFbInHz);
}
break;
@@ -293,19 +271,17 @@ int32_t AudioConferenceMixerImpl::Process()
}
}
- UpdateToMix(mixList, rampOutList, &mixedParticipantsMap,
+ UpdateToMix(&mixList, &rampOutList, &mixedParticipantsMap,
remainingParticipantsAllowedToMix);
- GetAdditionalAudio(additionalFramesList);
+ GetAdditionalAudio(&additionalFramesList);
UpdateMixedStatus(mixedParticipantsMap);
- _scratchParticipantsToMixAmount =
- static_cast<uint32_t>(mixedParticipantsMap.size());
+ _scratchParticipantsToMixAmount = mixedParticipantsMap.size();
}
// Get an AudioFrame for mixing from the memory pool.
AudioFrame* mixedAudio = NULL;
- if(_audioFramePool->PopMemory(mixedAudio) == -1)
- {
+ if(_audioFramePool->PopMemory(mixedAudio) == -1) {
WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
"failed PopMemory() call");
assert(false);
@@ -322,12 +298,9 @@ int32_t AudioConferenceMixerImpl::Process()
// with an API instead of dynamically.
// Find the max channels over all mixing lists.
- const int num_mixed_channels = std::max(MaxNumChannels(mixList),
- std::max(MaxNumChannels(additionalFramesList),
- MaxNumChannels(rampOutList)));
-
- if (!SetNumLimiterChannels(num_mixed_channels))
- retval = -1;
+ const int num_mixed_channels = std::max(MaxNumChannels(&mixList),
+ std::max(MaxNumChannels(&additionalFramesList),
+ MaxNumChannels(&rampOutList)));
mixedAudio->UpdateFrame(-1, _timeStamp, NULL, 0, _outputFrequency,
AudioFrame::kNormalSpeech,
@@ -335,18 +308,15 @@ int32_t AudioConferenceMixerImpl::Process()
_timeStamp += _sampleSize;
- MixFromList(*mixedAudio, mixList);
- MixAnonomouslyFromList(*mixedAudio, additionalFramesList);
- MixAnonomouslyFromList(*mixedAudio, rampOutList);
+ MixFromList(*mixedAudio, &mixList);
+ MixAnonomouslyFromList(*mixedAudio, &additionalFramesList);
+ MixAnonomouslyFromList(*mixedAudio, &rampOutList);
- if(mixedAudio->samples_per_channel_ == 0)
- {
+ if(mixedAudio->samples_per_channel_ == 0) {
// Nothing was mixed, set the audio samples to silence.
mixedAudio->samples_per_channel_ = _sampleSize;
mixedAudio->Mute();
- }
- else
- {
+ } else {
// Only call the limiter if we have something to mix.
if(!LimitMixedAudio(*mixedAudio))
retval = -1;
@@ -355,12 +325,10 @@ int32_t AudioConferenceMixerImpl::Process()
_mixedAudioLevel.ComputeLevel(mixedAudio->data_,_sampleSize);
audioLevel = _mixedAudioLevel.GetLevel();
- if(_mixerStatusCb)
- {
+ if(_mixerStatusCb) {
_scratchVadPositiveParticipantsAmount = 0;
- UpdateVADPositiveParticipants(mixList);
- if(_amountOf10MsUntilNextCallback-- == 0)
- {
+ UpdateVADPositiveParticipants(&mixList);
+ if(_amountOf10MsUntilNextCallback-- == 0) {
_amountOf10MsUntilNextCallback = _amountOf10MsBetweenCallbacks;
timeForMixerCallback = true;
}
@@ -369,8 +337,7 @@ int32_t AudioConferenceMixerImpl::Process()
{
CriticalSectionScoped cs(_cbCrit.get());
- if(_mixReceiver != NULL)
- {
+ if(_mixReceiver != NULL) {
const AudioFrame** dummy = NULL;
_mixReceiver->NewMixedAudio(
_id,
@@ -380,12 +347,11 @@ int32_t AudioConferenceMixerImpl::Process()
}
if((_mixerStatusCallback != NULL) &&
- timeForMixerCallback)
- {
+ timeForMixerCallback) {
_mixerStatusCallback->MixedParticipants(
_id,
_scratchMixedParticipants,
- _scratchParticipantsToMixAmount);
+ static_cast<uint32_t>(_scratchParticipantsToMixAmount));
_mixerStatusCallback->VADPositiveParticipants(
_id,
@@ -397,9 +363,9 @@ int32_t AudioConferenceMixerImpl::Process()
// Reclaim all outstanding memory.
_audioFramePool->PushMemory(mixedAudio);
- ClearAudioFrameList(mixList);
- ClearAudioFrameList(rampOutList);
- ClearAudioFrameList(additionalFramesList);
+ ClearAudioFrameList(&mixList);
+ ClearAudioFrameList(&rampOutList);
+ ClearAudioFrameList(&additionalFramesList);
{
CriticalSectionScoped cs(_crit.get());
_processCalls--;
@@ -408,22 +374,18 @@ int32_t AudioConferenceMixerImpl::Process()
}
int32_t AudioConferenceMixerImpl::RegisterMixedStreamCallback(
- AudioMixerOutputReceiver& mixReceiver)
-{
+ AudioMixerOutputReceiver& mixReceiver) {
CriticalSectionScoped cs(_cbCrit.get());
- if(_mixReceiver != NULL)
- {
+ if(_mixReceiver != NULL) {
return -1;
}
_mixReceiver = &mixReceiver;
return 0;
}
-int32_t AudioConferenceMixerImpl::UnRegisterMixedStreamCallback()
-{
+int32_t AudioConferenceMixerImpl::UnRegisterMixedStreamCallback() {
CriticalSectionScoped cs(_cbCrit.get());
- if(_mixReceiver == NULL)
- {
+ if(_mixReceiver == NULL) {
return -1;
}
_mixReceiver = NULL;
@@ -431,16 +393,8 @@ int32_t AudioConferenceMixerImpl::UnRegisterMixedStreamCallback()
}
int32_t AudioConferenceMixerImpl::SetOutputFrequency(
- const Frequency frequency)
-{
+ const Frequency frequency) {
CriticalSectionScoped cs(_crit.get());
- const int error = _limiter->set_sample_rate_hz(frequency);
- if(error != _limiter->kNoError)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
- "Error from AudioProcessing: %d", error);
- return -1;
- }
_outputFrequency = frequency;
_sampleSize = (_outputFrequency*kProcessPeriodicityInMs) / 1000;
@@ -449,36 +403,15 @@ int32_t AudioConferenceMixerImpl::SetOutputFrequency(
}
AudioConferenceMixer::Frequency
-AudioConferenceMixerImpl::OutputFrequency() const
-{
+AudioConferenceMixerImpl::OutputFrequency() const {
CriticalSectionScoped cs(_crit.get());
return _outputFrequency;
}
-bool AudioConferenceMixerImpl::SetNumLimiterChannels(int numChannels)
-{
- if(_limiter->num_input_channels() != numChannels)
- {
- const int error = _limiter->set_num_channels(numChannels,
- numChannels);
- if(error != _limiter->kNoError)
- {
- WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
- "Error from AudioProcessing: %d", error);
- assert(false);
- return false;
- }
- }
-
- return true;
-}
-
int32_t AudioConferenceMixerImpl::RegisterMixerStatusCallback(
AudioMixerStatusReceiver& mixerStatusCallback,
- const uint32_t amountOf10MsBetweenCallbacks)
-{
- if(amountOf10MsBetweenCallbacks == 0)
- {
+ const uint32_t amountOf10MsBetweenCallbacks) {
+ if(amountOf10MsBetweenCallbacks == 0) {
WEBRTC_TRACE(
kTraceWarning,
kTraceAudioMixerServer,
@@ -488,8 +421,7 @@ int32_t AudioConferenceMixerImpl::RegisterMixerStatusCallback(
}
{
CriticalSectionScoped cs(_cbCrit.get());
- if(_mixerStatusCallback != NULL)
- {
+ if(_mixerStatusCallback != NULL) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"Mixer status callback already registered");
return -1;
@@ -505,8 +437,7 @@ int32_t AudioConferenceMixerImpl::RegisterMixerStatusCallback(
return 0;
}
-int32_t AudioConferenceMixerImpl::UnRegisterMixerStatusCallback()
-{
+int32_t AudioConferenceMixerImpl::UnRegisterMixerStatusCallback() {
{
CriticalSectionScoped cs(_crit.get());
if(!_mixerStatusCb)
@@ -526,38 +457,31 @@ int32_t AudioConferenceMixerImpl::UnRegisterMixerStatusCallback()
int32_t AudioConferenceMixerImpl::SetMixabilityStatus(
MixerParticipant& participant,
- const bool mixable)
-{
- if (!mixable)
- {
+ bool mixable) {
+ if (!mixable) {
// Anonymous participants are in a separate list. Make sure that the
// participant is in the _participantList if it is being mixed.
SetAnonymousMixabilityStatus(participant, false);
}
- uint32_t numMixedParticipants;
+ size_t numMixedParticipants;
{
CriticalSectionScoped cs(_cbCrit.get());
const bool isMixed =
- IsParticipantInList(participant,_participantList);
+ IsParticipantInList(participant, &_participantList);
// API must be called with a new state.
- if(!(mixable ^ isMixed))
- {
+ if(!(mixable ^ isMixed)) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"Mixable is aready %s",
isMixed ? "ON" : "off");
return -1;
}
bool success = false;
- if(mixable)
- {
- success = AddParticipantToList(participant,_participantList);
- }
- else
- {
- success = RemoveParticipantFromList(participant,_participantList);
+ if(mixable) {
+ success = AddParticipantToList(participant, &_participantList);
+ } else {
+ success = RemoveParticipantFromList(participant, &_participantList);
}
- if(!success)
- {
+ if(!success) {
WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
"failed to %s participant",
mixable ? "add" : "remove");
@@ -565,13 +489,12 @@ int32_t AudioConferenceMixerImpl::SetMixabilityStatus(
return -1;
}
- int numMixedNonAnonymous = _participantList.GetSize();
- if (numMixedNonAnonymous > kMaximumAmountOfMixedParticipants)
- {
+ size_t numMixedNonAnonymous = _participantList.size();
+ if (numMixedNonAnonymous > kMaximumAmountOfMixedParticipants) {
numMixedNonAnonymous = kMaximumAmountOfMixedParticipants;
}
- numMixedParticipants = numMixedNonAnonymous +
- _additionalParticipantList.GetSize();
+ numMixedParticipants =
+ numMixedNonAnonymous + _additionalParticipantList.size();
}
// A MixerParticipant was added or removed. Make sure the scratch
// buffer is updated if necessary.
@@ -583,40 +506,34 @@ int32_t AudioConferenceMixerImpl::SetMixabilityStatus(
int32_t AudioConferenceMixerImpl::MixabilityStatus(
MixerParticipant& participant,
- bool& mixable)
-{
+ bool& mixable) {
CriticalSectionScoped cs(_cbCrit.get());
- mixable = IsParticipantInList(participant, _participantList);
+ mixable = IsParticipantInList(participant, &_participantList);
return 0;
}
int32_t AudioConferenceMixerImpl::SetAnonymousMixabilityStatus(
- MixerParticipant& participant, const bool anonymous)
-{
+ MixerParticipant& participant, const bool anonymous) {
CriticalSectionScoped cs(_cbCrit.get());
- if(IsParticipantInList(participant, _additionalParticipantList))
- {
- if(anonymous)
- {
+ if(IsParticipantInList(participant, &_additionalParticipantList)) {
+ if(anonymous) {
return 0;
}
- if(!RemoveParticipantFromList(participant, _additionalParticipantList))
- {
+ if(!RemoveParticipantFromList(participant,
+ &_additionalParticipantList)) {
WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
"unable to remove participant from anonymous list");
assert(false);
return -1;
}
- return AddParticipantToList(participant, _participantList) ? 0 : -1;
+ return AddParticipantToList(participant, &_participantList) ? 0 : -1;
}
- if(!anonymous)
- {
+ if(!anonymous) {
return 0;
}
const bool mixable = RemoveParticipantFromList(participant,
- _participantList);
- if(!mixable)
- {
+ &_participantList);
+ if(!mixable) {
WEBRTC_TRACE(
kTraceWarning,
kTraceAudioMixerServer,
@@ -626,39 +543,33 @@ int32_t AudioConferenceMixerImpl::SetAnonymousMixabilityStatus(
// already registered.
return -1;
}
- return AddParticipantToList(participant, _additionalParticipantList) ?
+ return AddParticipantToList(participant, &_additionalParticipantList) ?
0 : -1;
}
int32_t AudioConferenceMixerImpl::AnonymousMixabilityStatus(
- MixerParticipant& participant, bool& mixable)
-{
+ MixerParticipant& participant, bool& mixable) {
CriticalSectionScoped cs(_cbCrit.get());
mixable = IsParticipantInList(participant,
- _additionalParticipantList);
+ &_additionalParticipantList);
return 0;
}
int32_t AudioConferenceMixerImpl::SetMinimumMixingFrequency(
- Frequency freq)
-{
+ Frequency freq) {
// Make sure that only allowed sampling frequencies are used. Use closest
// higher sampling frequency to avoid losing information.
- if (static_cast<int>(freq) == 12000)
- {
+ if (static_cast<int>(freq) == 12000) {
freq = kWbInHz;
} else if (static_cast<int>(freq) == 24000) {
freq = kSwbInHz;
}
if((freq == kNbInHz) || (freq == kWbInHz) || (freq == kSwbInHz) ||
- (freq == kLowestPossible))
- {
+ (freq == kLowestPossible)) {
_minimumMixingFreq=freq;
return 0;
- }
- else
- {
+ } else {
WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
"SetMinimumMixingFrequency incorrect frequency: %i",freq);
assert(false);
@@ -668,20 +579,17 @@ int32_t AudioConferenceMixerImpl::SetMinimumMixingFrequency(
// Check all AudioFrames that are to be mixed. The highest sampling frequency
// found is the lowest that can be used without losing information.
-int32_t AudioConferenceMixerImpl::GetLowestMixingFrequency()
-{
+int32_t AudioConferenceMixerImpl::GetLowestMixingFrequency() {
const int participantListFrequency =
- GetLowestMixingFrequencyFromList(_participantList);
+ GetLowestMixingFrequencyFromList(&_participantList);
const int anonymousListFrequency =
- GetLowestMixingFrequencyFromList(_additionalParticipantList);
+ GetLowestMixingFrequencyFromList(&_additionalParticipantList);
const int highestFreq =
(participantListFrequency > anonymousListFrequency) ?
participantListFrequency : anonymousListFrequency;
// Check if the user specified a lowest mixing frequency.
- if(_minimumMixingFreq != kLowestPossible)
- {
- if(_minimumMixingFreq > highestFreq)
- {
+ if(_minimumMixingFreq != kLowestPossible) {
+ if(_minimumMixingFreq > highestFreq) {
return _minimumMixingFreq;
}
}
@@ -689,60 +597,47 @@ int32_t AudioConferenceMixerImpl::GetLowestMixingFrequency()
}
int32_t AudioConferenceMixerImpl::GetLowestMixingFrequencyFromList(
- ListWrapper& mixList)
-{
+ MixerParticipantList* mixList) {
int32_t highestFreq = 8000;
- ListItem* item = mixList.First();
- while(item)
- {
- MixerParticipant* participant =
- static_cast<MixerParticipant*>(item->GetItem());
- const int32_t neededFrequency = participant->NeededFrequency(_id);
- if(neededFrequency > highestFreq)
- {
+ for (MixerParticipantList::iterator iter = mixList->begin();
+ iter != mixList->end();
+ ++iter) {
+ const int32_t neededFrequency = (*iter)->NeededFrequency(_id);
+ if(neededFrequency > highestFreq) {
highestFreq = neededFrequency;
}
- item = mixList.Next(item);
}
return highestFreq;
}
void AudioConferenceMixerImpl::UpdateToMix(
- ListWrapper& mixList,
- ListWrapper& rampOutList,
+ AudioFrameList* mixList,
+ AudioFrameList* rampOutList,
std::map<int, MixerParticipant*>* mixParticipantList,
- uint32_t& maxAudioFrameCounter) {
+ size_t& maxAudioFrameCounter) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"UpdateToMix(mixList,rampOutList,mixParticipantList,%d)",
maxAudioFrameCounter);
- const uint32_t mixListStartSize = mixList.GetSize();
- ListWrapper activeList; // Elements are AudioFrames
+ const size_t mixListStartSize = mixList->size();
+ AudioFrameList activeList;
// Struct needed by the passive lists to keep track of which AudioFrame
// belongs to which MixerParticipant.
- struct ParticipantFramePair
- {
- MixerParticipant* participant;
- AudioFrame* audioFrame;
- };
- ListWrapper passiveWasNotMixedList; // Elements are MixerParticipant
- ListWrapper passiveWasMixedList; // Elements are MixerParticipant
- ListItem* item = _participantList.First();
- while(item)
- {
+ ParticipantFramePairList passiveWasNotMixedList;
+ ParticipantFramePairList passiveWasMixedList;
+ for (MixerParticipantList::iterator participant = _participantList.begin();
+ participant != _participantList.end();
+ ++participant) {
// Stop keeping track of passive participants if there are already
// enough participants available (they wont be mixed anyway).
bool mustAddToPassiveList = (maxAudioFrameCounter >
- (activeList.GetSize() +
- passiveWasMixedList.GetSize() +
- passiveWasNotMixedList.GetSize()));
+ (activeList.size() +
+ passiveWasMixedList.size() +
+ passiveWasNotMixedList.size()));
- MixerParticipant* participant = static_cast<MixerParticipant*>(
- item->GetItem());
bool wasMixed = false;
- participant->_mixHistory->WasMixed(wasMixed);
+ (*participant)->_mixHistory->WasMixed(wasMixed);
AudioFrame* audioFrame = NULL;
- if(_audioFramePool->PopMemory(audioFrame) == -1)
- {
+ if(_audioFramePool->PopMemory(audioFrame) == -1) {
WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
"failed PopMemory() call");
assert(false);
@@ -750,56 +645,51 @@ void AudioConferenceMixerImpl::UpdateToMix(
}
audioFrame->sample_rate_hz_ = _outputFrequency;
- if(participant->GetAudioFrame(_id,*audioFrame) != 0)
- {
+ if((*participant)->GetAudioFrame(_id,*audioFrame) != 0) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"failed to GetAudioFrame() from participant");
_audioFramePool->PushMemory(audioFrame);
- item = _participantList.Next(item);
continue;
}
+ if (_participantList.size() != 1) {
+ // TODO(wu): Issue 3390, add support for multiple participants case.
+ audioFrame->ntp_time_ms_ = -1;
+ }
+
// TODO(henrike): this assert triggers in some test cases where SRTP is
// used which prevents NetEQ from making a VAD. Temporarily disable this
// assert until the problem is fixed on a higher level.
// assert(audioFrame->vad_activity_ != AudioFrame::kVadUnknown);
- if (audioFrame->vad_activity_ == AudioFrame::kVadUnknown)
- {
+ if (audioFrame->vad_activity_ == AudioFrame::kVadUnknown) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"invalid VAD state from participant");
}
- if(audioFrame->vad_activity_ == AudioFrame::kVadActive)
- {
- if(!wasMixed)
- {
+ if(audioFrame->vad_activity_ == AudioFrame::kVadActive) {
+ if(!wasMixed) {
RampIn(*audioFrame);
}
- if(activeList.GetSize() >= maxAudioFrameCounter)
- {
+ if(activeList.size() >= maxAudioFrameCounter) {
// There are already more active participants than should be
// mixed. Only keep the ones with the highest energy.
- ListItem* replaceItem = NULL;
+ AudioFrameList::iterator replaceItem;
CalculateEnergy(*audioFrame);
uint32_t lowestEnergy = audioFrame->energy_;
- ListItem* activeItem = activeList.First();
- while(activeItem)
- {
- AudioFrame* replaceFrame = static_cast<AudioFrame*>(
- activeItem->GetItem());
- CalculateEnergy(*replaceFrame);
- if(replaceFrame->energy_ < lowestEnergy)
- {
- replaceItem = activeItem;
- lowestEnergy = replaceFrame->energy_;
+ bool found_replace_item = false;
+ for (AudioFrameList::iterator iter = activeList.begin();
+ iter != activeList.end();
+ ++iter) {
+ CalculateEnergy(**iter);
+ if((*iter)->energy_ < lowestEnergy) {
+ replaceItem = iter;
+ lowestEnergy = (*iter)->energy_;
+ found_replace_item = true;
}
- activeItem = activeList.Next(activeItem);
}
- if(replaceItem != NULL)
- {
- AudioFrame* replaceFrame = static_cast<AudioFrame*>(
- replaceItem->GetItem());
+ if(found_replace_item) {
+ AudioFrame* replaceFrame = *replaceItem;
bool replaceWasMixed = false;
std::map<int, MixerParticipant*>::iterator it =
@@ -812,255 +702,219 @@ void AudioConferenceMixerImpl::UpdateToMix(
it->second->_mixHistory->WasMixed(replaceWasMixed);
mixParticipantList->erase(replaceFrame->id_);
- activeList.Erase(replaceItem);
+ activeList.erase(replaceItem);
- activeList.PushFront(static_cast<void*>(audioFrame));
- (*mixParticipantList)[audioFrame->id_] = participant;
+ activeList.push_front(audioFrame);
+ (*mixParticipantList)[audioFrame->id_] = *participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
if (replaceWasMixed) {
RampOut(*replaceFrame);
- rampOutList.PushBack(static_cast<void*>(replaceFrame));
- assert(rampOutList.GetSize() <=
+ rampOutList->push_back(replaceFrame);
+ assert(rampOutList->size() <=
kMaximumAmountOfMixedParticipants);
} else {
_audioFramePool->PushMemory(replaceFrame);
}
} else {
- if(wasMixed)
- {
+ if(wasMixed) {
RampOut(*audioFrame);
- rampOutList.PushBack(static_cast<void*>(audioFrame));
- assert(rampOutList.GetSize() <=
+ rampOutList->push_back(audioFrame);
+ assert(rampOutList->size() <=
kMaximumAmountOfMixedParticipants);
} else {
_audioFramePool->PushMemory(audioFrame);
}
}
} else {
- activeList.PushFront(static_cast<void*>(audioFrame));
- (*mixParticipantList)[audioFrame->id_] = participant;
+ activeList.push_front(audioFrame);
+ (*mixParticipantList)[audioFrame->id_] = *participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
}
} else {
- if(wasMixed)
- {
+ if(wasMixed) {
ParticipantFramePair* pair = new ParticipantFramePair;
pair->audioFrame = audioFrame;
- pair->participant = participant;
- passiveWasMixedList.PushBack(static_cast<void*>(pair));
+ pair->participant = *participant;
+ passiveWasMixedList.push_back(pair);
} else if(mustAddToPassiveList) {
RampIn(*audioFrame);
ParticipantFramePair* pair = new ParticipantFramePair;
pair->audioFrame = audioFrame;
- pair->participant = participant;
- passiveWasNotMixedList.PushBack(static_cast<void*>(pair));
+ pair->participant = *participant;
+ passiveWasNotMixedList.push_back(pair);
} else {
_audioFramePool->PushMemory(audioFrame);
}
}
- item = _participantList.Next(item);
}
- assert(activeList.GetSize() <= maxAudioFrameCounter);
+ assert(activeList.size() <= maxAudioFrameCounter);
// At this point it is known which participants should be mixed. Transfer
// this information to this functions output parameters.
- while(!activeList.Empty())
- {
- ListItem* mixItem = activeList.First();
- mixList.PushBack(mixItem->GetItem());
- activeList.Erase(mixItem);
+ for (AudioFrameList::iterator iter = activeList.begin();
+ iter != activeList.end();
+ ++iter) {
+ mixList->push_back(*iter);
}
+ activeList.clear();
// Always mix a constant number of AudioFrames. If there aren't enough
// active participants mix passive ones. Starting with those that was mixed
// last iteration.
- while(!passiveWasMixedList.Empty())
- {
- ListItem* mixItem = passiveWasMixedList.First();
- ParticipantFramePair* pair = static_cast<ParticipantFramePair*>(
- mixItem->GetItem());
- if(mixList.GetSize() < maxAudioFrameCounter + mixListStartSize)
- {
- mixList.PushBack(pair->audioFrame);
- (*mixParticipantList)[pair->audioFrame->id_] =
- pair->participant;
+ for (ParticipantFramePairList::iterator iter = passiveWasMixedList.begin();
+ iter != passiveWasMixedList.end();
+ ++iter) {
+ if(mixList->size() < maxAudioFrameCounter + mixListStartSize) {
+ mixList->push_back((*iter)->audioFrame);
+ (*mixParticipantList)[(*iter)->audioFrame->id_] =
+ (*iter)->participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
+ } else {
+ _audioFramePool->PushMemory((*iter)->audioFrame);
}
- else
- {
- _audioFramePool->PushMemory(pair->audioFrame);
- }
- delete pair;
- passiveWasMixedList.Erase(mixItem);
+ delete *iter;
}
// And finally the ones that have not been mixed for a while.
- while(!passiveWasNotMixedList.Empty())
- {
- ListItem* mixItem = passiveWasNotMixedList.First();
- ParticipantFramePair* pair = static_cast<ParticipantFramePair*>(
- mixItem->GetItem());
- if(mixList.GetSize() < maxAudioFrameCounter + mixListStartSize)
- {
- mixList.PushBack(pair->audioFrame);
- (*mixParticipantList)[pair->audioFrame->id_] = pair->participant;
+ for (ParticipantFramePairList::iterator iter =
+ passiveWasNotMixedList.begin();
+ iter != passiveWasNotMixedList.end();
+ ++iter) {
+ if(mixList->size() < maxAudioFrameCounter + mixListStartSize) {
+ mixList->push_back((*iter)->audioFrame);
+ (*mixParticipantList)[(*iter)->audioFrame->id_] =
+ (*iter)->participant;
assert(mixParticipantList->size() <=
kMaximumAmountOfMixedParticipants);
+ } else {
+ _audioFramePool->PushMemory((*iter)->audioFrame);
}
- else
- {
- _audioFramePool->PushMemory(pair->audioFrame);
- }
- delete pair;
- passiveWasNotMixedList.Erase(mixItem);
+ delete *iter;
}
- assert(maxAudioFrameCounter + mixListStartSize >= mixList.GetSize());
- maxAudioFrameCounter += mixListStartSize - mixList.GetSize();
+ assert(maxAudioFrameCounter + mixListStartSize >= mixList->size());
+ maxAudioFrameCounter += mixListStartSize - mixList->size();
}
void AudioConferenceMixerImpl::GetAdditionalAudio(
- ListWrapper& additionalFramesList)
-{
+ AudioFrameList* additionalFramesList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"GetAdditionalAudio(additionalFramesList)");
- ListItem* item = _additionalParticipantList.First();
- while(item)
- {
- // The GetAudioFrame() callback may remove the current item. Store the
- // next item just in case that happens.
- ListItem* nextItem = _additionalParticipantList.Next(item);
-
- MixerParticipant* participant = static_cast<MixerParticipant*>(
- item->GetItem());
+ // The GetAudioFrame() callback may result in the participant being removed
+ // from additionalParticipantList_. If that happens it will invalidate any
+ // iterators. Create a copy of the participants list such that the list of
+ // participants can be traversed safely.
+ MixerParticipantList additionalParticipantList;
+ additionalParticipantList.insert(additionalParticipantList.begin(),
+ _additionalParticipantList.begin(),
+ _additionalParticipantList.end());
+
+ for (MixerParticipantList::iterator participant =
+ additionalParticipantList.begin();
+ participant != additionalParticipantList.end();
+ ++participant) {
AudioFrame* audioFrame = NULL;
- if(_audioFramePool->PopMemory(audioFrame) == -1)
- {
+ if(_audioFramePool->PopMemory(audioFrame) == -1) {
WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
"failed PopMemory() call");
assert(false);
return;
}
audioFrame->sample_rate_hz_ = _outputFrequency;
- if(participant->GetAudioFrame(_id, *audioFrame) != 0)
- {
+ if((*participant)->GetAudioFrame(_id, *audioFrame) != 0) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
"failed to GetAudioFrame() from participant");
_audioFramePool->PushMemory(audioFrame);
- item = nextItem;
continue;
}
- if(audioFrame->samples_per_channel_ == 0)
- {
+ if(audioFrame->samples_per_channel_ == 0) {
// Empty frame. Don't use it.
_audioFramePool->PushMemory(audioFrame);
- item = nextItem;
continue;
}
- additionalFramesList.PushBack(static_cast<void*>(audioFrame));
- item = nextItem;
+ additionalFramesList->push_back(audioFrame);
}
}
void AudioConferenceMixerImpl::UpdateMixedStatus(
- std::map<int, MixerParticipant*>& mixedParticipantsMap)
-{
+ std::map<int, MixerParticipant*>& mixedParticipantsMap) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"UpdateMixedStatus(mixedParticipantsMap)");
assert(mixedParticipantsMap.size() <= kMaximumAmountOfMixedParticipants);
// Loop through all participants. If they are in the mix map they
// were mixed.
- ListItem* participantItem = _participantList.First();
- while(participantItem != NULL)
- {
+ for (MixerParticipantList::iterator participant = _participantList.begin();
+ participant != _participantList.end();
+ ++participant) {
bool isMixed = false;
- MixerParticipant* participant =
- static_cast<MixerParticipant*>(participantItem->GetItem());
-
for (std::map<int, MixerParticipant*>::iterator it =
mixedParticipantsMap.begin();
it != mixedParticipantsMap.end();
++it) {
- if (it->second == participant) {
+ if (it->second == *participant) {
isMixed = true;
break;
}
}
- participant->_mixHistory->SetIsMixed(isMixed);
- participantItem = _participantList.Next(participantItem);
+ (*participant)->_mixHistory->SetIsMixed(isMixed);
}
}
-void AudioConferenceMixerImpl::ClearAudioFrameList(ListWrapper& audioFrameList)
-{
+void AudioConferenceMixerImpl::ClearAudioFrameList(
+ AudioFrameList* audioFrameList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"ClearAudioFrameList(audioFrameList)");
- ListItem* item = audioFrameList.First();
- while(item)
- {
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
- _audioFramePool->PushMemory(audioFrame);
- audioFrameList.Erase(item);
- item = audioFrameList.First();
+ for (AudioFrameList::iterator iter = audioFrameList->begin();
+ iter != audioFrameList->end();
+ ++iter) {
+ _audioFramePool->PushMemory(*iter);
}
+ audioFrameList->clear();
}
void AudioConferenceMixerImpl::UpdateVADPositiveParticipants(
- ListWrapper& mixList)
-{
+ AudioFrameList* mixList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"UpdateVADPositiveParticipants(mixList)");
- ListItem* item = mixList.First();
- while(item != NULL)
- {
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
- CalculateEnergy(*audioFrame);
- if(audioFrame->vad_activity_ == AudioFrame::kVadActive)
- {
+ for (AudioFrameList::iterator iter = mixList->begin();
+ iter != mixList->end();
+ ++iter) {
+ CalculateEnergy(**iter);
+ if((*iter)->vad_activity_ == AudioFrame::kVadActive) {
_scratchVadPositiveParticipants[
_scratchVadPositiveParticipantsAmount].participant =
- audioFrame->id_;
+ (*iter)->id_;
// TODO(andrew): to what should this be set?
_scratchVadPositiveParticipants[
_scratchVadPositiveParticipantsAmount].level = 0;
_scratchVadPositiveParticipantsAmount++;
}
- item = mixList.Next(item);
}
}
bool AudioConferenceMixerImpl::IsParticipantInList(
MixerParticipant& participant,
- ListWrapper& participantList)
-{
+ MixerParticipantList* participantList) const {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"IsParticipantInList(participant,participantList)");
- ListItem* item = participantList.First();
- while(item != NULL)
- {
- MixerParticipant* rhsParticipant =
- static_cast<MixerParticipant*>(item->GetItem());
- if(&participant == rhsParticipant)
- {
+ for (MixerParticipantList::const_iterator iter = participantList->begin();
+ iter != participantList->end();
+ ++iter) {
+ if(&participant == *iter) {
return true;
}
- item = participantList.Next(item);
}
return false;
}
bool AudioConferenceMixerImpl::AddParticipantToList(
MixerParticipant& participant,
- ListWrapper& participantList)
-{
+ MixerParticipantList* participantList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"AddParticipantToList(participant, participantList)");
- if(participantList.PushBack(static_cast<void*>(&participant)) == -1)
- {
- return false;
- }
+ participantList->push_back(&participant);
// Make sure that the mixed status is correct for new MixerParticipant.
participant._mixHistory->ResetMixedStatus();
return true;
@@ -1068,52 +922,53 @@ bool AudioConferenceMixerImpl::AddParticipantToList(
bool AudioConferenceMixerImpl::RemoveParticipantFromList(
MixerParticipant& participant,
- ListWrapper& participantList)
-{
+ MixerParticipantList* participantList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"RemoveParticipantFromList(participant, participantList)");
- ListItem* item = participantList.First();
- while(item)
- {
- if(item->GetItem() == &participant)
- {
- participantList.Erase(item);
+ for (MixerParticipantList::iterator iter = participantList->begin();
+ iter != participantList->end();
+ ++iter) {
+ if(*iter == &participant) {
+ participantList->erase(iter);
// Participant is no longer mixed, reset to default.
participant._mixHistory->ResetMixedStatus();
return true;
}
- item = participantList.Next(item);
}
return false;
}
int32_t AudioConferenceMixerImpl::MixFromList(
AudioFrame& mixedAudio,
- const ListWrapper& audioFrameList)
-{
+ const AudioFrameList* audioFrameList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"MixFromList(mixedAudio, audioFrameList)");
- uint32_t position = 0;
- ListItem* item = audioFrameList.First();
- if(item == NULL)
- {
- return 0;
- }
+ if(audioFrameList->empty()) return 0;
- if(_numMixedParticipants == 1)
- {
+ uint32_t position = 0;
+ if(_numMixedParticipants == 1) {
// No mixing required here; skip the saturation protection.
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+ AudioFrame* audioFrame = audioFrameList->front();
mixedAudio.CopyFrom(*audioFrame);
SetParticipantStatistics(&_scratchMixedParticipants[position],
*audioFrame);
return 0;
}
- while(item != NULL)
- {
- if(position >= kMaximumAmountOfMixedParticipants)
- {
+ if (audioFrameList->size() == 1) {
+ mixedAudio.timestamp_ = audioFrameList->front()->timestamp_;
+ mixedAudio.elapsed_time_ms_ = audioFrameList->front()->elapsed_time_ms_;
+ } else {
+ // TODO(wu): Issue 3390.
+ // Audio frame timestamp is only supported in one channel case.
+ mixedAudio.timestamp_ = 0;
+ mixedAudio.elapsed_time_ms_ = -1;
+ }
+
+ for (AudioFrameList::const_iterator iter = audioFrameList->begin();
+ iter != audioFrameList->end();
+ ++iter) {
+ if(position >= kMaximumAmountOfMixedParticipants) {
WEBRTC_TRACE(
kTraceMemory,
kTraceAudioMixerServer,
@@ -1124,14 +979,12 @@ int32_t AudioConferenceMixerImpl::MixFromList(
assert(false);
position = 0;
}
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
- MixFrames(&mixedAudio, audioFrame);
+ MixFrames(&mixedAudio, (*iter));
SetParticipantStatistics(&_scratchMixedParticipants[position],
- *audioFrame);
+ **iter);
position++;
- item = audioFrameList.Next(item);
}
return 0;
@@ -1140,35 +993,29 @@ int32_t AudioConferenceMixerImpl::MixFromList(
// TODO(andrew): consolidate this function with MixFromList.
int32_t AudioConferenceMixerImpl::MixAnonomouslyFromList(
AudioFrame& mixedAudio,
- const ListWrapper& audioFrameList)
-{
+ const AudioFrameList* audioFrameList) {
WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
"MixAnonomouslyFromList(mixedAudio, audioFrameList)");
- ListItem* item = audioFrameList.First();
- if(item == NULL)
- return 0;
- if(_numMixedParticipants == 1)
- {
+ if(audioFrameList->empty()) return 0;
+
+ if(_numMixedParticipants == 1) {
// No mixing required here; skip the saturation protection.
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+ AudioFrame* audioFrame = audioFrameList->front();
mixedAudio.CopyFrom(*audioFrame);
return 0;
}
- while(item != NULL)
- {
- AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
- MixFrames(&mixedAudio, audioFrame);
- item = audioFrameList.Next(item);
+ for (AudioFrameList::const_iterator iter = audioFrameList->begin();
+ iter != audioFrameList->end();
+ ++iter) {
+ MixFrames(&mixedAudio, *iter);
}
return 0;
}
-bool AudioConferenceMixerImpl::LimitMixedAudio(AudioFrame& mixedAudio)
-{
- if(_numMixedParticipants == 1)
- {
+bool AudioConferenceMixerImpl::LimitMixedAudio(AudioFrame& mixedAudio) {
+ if(_numMixedParticipants == 1) {
return true;
}
@@ -1187,8 +1034,7 @@ bool AudioConferenceMixerImpl::LimitMixedAudio(AudioFrame& mixedAudio)
// negative value is undefined).
mixedAudio += mixedAudio;
- if(error != _limiter->kNoError)
- {
+ if(error != _limiter->kNoError) {
WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
"Error from AudioProcessing: %d", error);
assert(false);
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
index 737acbb0ba4..31dc71e5dce 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
@@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
+#include <list>
#include <map>
#include "webrtc/engine_configurations.h"
@@ -19,13 +20,15 @@
#include "webrtc/modules/audio_conference_mixer/source/memory_pool.h"
#include "webrtc/modules/audio_conference_mixer/source/time_scheduler.h"
#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
class AudioProcessing;
class CriticalSectionWrapper;
+typedef std::list<AudioFrame*> AudioFrameList;
+typedef std::list<MixerParticipant*> MixerParticipantList;
+
// Cheshire cat implementation of MixerParticipant's non virtual functions.
class MixHistory
{
@@ -74,7 +77,7 @@ public:
const uint32_t amountOf10MsBetweenCallbacks);
virtual int32_t UnRegisterMixerStatusCallback();
virtual int32_t SetMixabilityStatus(MixerParticipant& participant,
- const bool mixable);
+ bool mixable);
virtual int32_t MixabilityStatus(MixerParticipant& participant,
bool& mixable);
virtual int32_t SetMinimumMixingFrequency(Frequency freq);
@@ -89,10 +92,6 @@ private:
int32_t SetOutputFrequency(const Frequency frequency);
Frequency OutputFrequency() const;
- // Must be called whenever an audio frame indicates the number of channels
- // has changed.
- bool SetNumLimiterChannels(int numChannels);
-
// Fills mixList with the AudioFrames pointers that should be used when
// mixing. Fills mixParticipantList with ParticipantStatistics for the
// participants who's AudioFrames are inside mixList.
@@ -102,18 +101,18 @@ private:
// used to be mixed but shouldn't be mixed any longer. These AudioFrames
// should be ramped out over this AudioFrame to avoid audio discontinuities.
void UpdateToMix(
- ListWrapper& mixList,
- ListWrapper& rampOutList,
+ AudioFrameList* mixList,
+ AudioFrameList* rampOutList,
std::map<int, MixerParticipant*>* mixParticipantList,
- uint32_t& maxAudioFrameCounter);
+ size_t& maxAudioFrameCounter);
// Return the lowest mixing frequency that can be used without having to
// downsample any audio.
int32_t GetLowestMixingFrequency();
- int32_t GetLowestMixingFrequencyFromList(ListWrapper& mixList);
+ int32_t GetLowestMixingFrequencyFromList(MixerParticipantList* mixList);
// Return the AudioFrames that should be mixed anonymously.
- void GetAdditionalAudio(ListWrapper& additionalFramesList);
+ void GetAdditionalAudio(AudioFrameList* additionalFramesList);
// Update the MixHistory of all MixerParticipants. mixedParticipantsList
// should contain a map of MixerParticipants that have been mixed.
@@ -121,44 +120,44 @@ private:
std::map<int, MixerParticipant*>& mixedParticipantsList);
// Clears audioFrameList and reclaims all memory associated with it.
- void ClearAudioFrameList(ListWrapper& audioFrameList);
+ void ClearAudioFrameList(AudioFrameList* audioFrameList);
// Update the list of MixerParticipants who have a positive VAD. mixList
// should be a list of AudioFrames
void UpdateVADPositiveParticipants(
- ListWrapper& mixList);
+ AudioFrameList* mixList);
// This function returns true if it finds the MixerParticipant in the
// specified list of MixerParticipants.
bool IsParticipantInList(
MixerParticipant& participant,
- ListWrapper& participantList);
+ MixerParticipantList* participantList) const;
// Add/remove the MixerParticipant to the specified
// MixerParticipant list.
bool AddParticipantToList(
MixerParticipant& participant,
- ListWrapper& participantList);
+ MixerParticipantList* participantList);
bool RemoveParticipantFromList(
MixerParticipant& removeParticipant,
- ListWrapper& participantList);
+ MixerParticipantList* participantList);
// Mix the AudioFrames stored in audioFrameList into mixedAudio.
int32_t MixFromList(
AudioFrame& mixedAudio,
- const ListWrapper& audioFrameList);
+ const AudioFrameList* audioFrameList);
// Mix the AudioFrames stored in audioFrameList into mixedAudio. No
// record will be kept of this mix (e.g. the corresponding MixerParticipants
// will not be marked as IsMixed()
int32_t MixAnonomouslyFromList(AudioFrame& mixedAudio,
- const ListWrapper& audioFrameList);
+ const AudioFrameList* audioFrameList);
bool LimitMixedAudio(AudioFrame& mixedAudio);
// Scratch memory
// Note that the scratch memory may only be touched in the scope of
// Process().
- uint32_t _scratchParticipantsToMixAmount;
+ size_t _scratchParticipantsToMixAmount;
ParticipantStatistics _scratchMixedParticipants[
kMaximumAmountOfMixedParticipants];
uint32_t _scratchVadPositiveParticipantsAmount;
@@ -176,9 +175,9 @@ private:
AudioMixerOutputReceiver* _mixReceiver;
AudioMixerStatusReceiver* _mixerStatusCallback;
- uint32_t _amountOf10MsBetweenCallbacks;
- uint32_t _amountOf10MsUntilNextCallback;
- bool _mixerStatusCb;
+ uint32_t _amountOf10MsBetweenCallbacks;
+ uint32_t _amountOf10MsUntilNextCallback;
+ bool _mixerStatusCb;
// The current sample frequency and sample size when mixing.
Frequency _outputFrequency;
@@ -188,10 +187,11 @@ private:
MemoryPool<AudioFrame>* _audioFramePool;
// List of all participants. Note all lists are disjunct
- ListWrapper _participantList; // May be mixed.
- ListWrapper _additionalParticipantList; // Always mixed, anonomously.
+ MixerParticipantList _participantList; // May be mixed.
+ // Always mixed, anonomously.
+ MixerParticipantList _additionalParticipantList;
- uint32_t _numMixedParticipants;
+ size_t _numMixedParticipants;
uint32_t _timeStamp;
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
index 679d608f50f..3dce5c8bea6 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
@@ -41,10 +41,6 @@ const int rampSize = sizeof(rampArray)/sizeof(rampArray[0]);
namespace webrtc {
void CalculateEnergy(AudioFrame& audioFrame)
{
- if(audioFrame.energy_ != 0xffffffff)
- {
- return;
- }
audioFrame.energy_ = 0;
for(int position = 0; position < audioFrame.samples_per_channel_;
position++)
diff --git a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h
index 6d4dccf8ae3..04e7cd52254 100644
--- a/chromium/third_party/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h
+++ b/chromium/third_party/webrtc/modules/audio_conference_mixer/source/memory_pool_posix.h
@@ -12,9 +12,9 @@
#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_GENERIC_H_
#include <assert.h>
+#include <list>
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -40,7 +40,7 @@ private:
bool _terminate;
- ListWrapper _memoryPool;
+ std::list<MemoryType*> _memoryPool;
uint32_t _initialPoolSize;
uint32_t _createdMemory;
@@ -51,7 +51,6 @@ template<class MemoryType>
MemoryPoolImpl<MemoryType>::MemoryPoolImpl(int32_t initialPoolSize)
: _crit(CriticalSectionWrapper::CreateCriticalSection()),
_terminate(false),
- _memoryPool(),
_initialPoolSize(initialPoolSize),
_createdMemory(0),
_outstandingMemory(0)
@@ -76,20 +75,17 @@ int32_t MemoryPoolImpl<MemoryType>::PopMemory(MemoryType*& memory)
memory = NULL;
return -1;
}
- ListItem* item = _memoryPool.First();
- if(item == NULL)
- {
+ if (_memoryPool.empty()) {
// _memoryPool empty create new memory.
CreateMemory(_initialPoolSize);
- item = _memoryPool.First();
- if(item == NULL)
+ if(_memoryPool.empty())
{
memory = NULL;
return -1;
}
}
- memory = static_cast<MemoryType*>(item->GetItem());
- _memoryPool.Erase(item);
+ memory = _memoryPool.front();
+ _memoryPool.pop_front();
_outstandingMemory++;
return 0;
}
@@ -103,7 +99,7 @@ int32_t MemoryPoolImpl<MemoryType>::PushMemory(MemoryType*& memory)
}
CriticalSectionScoped cs(_crit);
_outstandingMemory--;
- if(_memoryPool.GetSize() > (_initialPoolSize << 1))
+ if(_memoryPool.size() > (_initialPoolSize << 1))
{
// Reclaim memory if less than half of the pool is unused.
_createdMemory--;
@@ -111,7 +107,7 @@ int32_t MemoryPoolImpl<MemoryType>::PushMemory(MemoryType*& memory)
memory = NULL;
return 0;
}
- _memoryPool.PushBack(static_cast<void*>(memory));
+ _memoryPool.push_back(memory);
memory = NULL;
return 0;
}
@@ -127,21 +123,15 @@ template<class MemoryType>
int32_t MemoryPoolImpl<MemoryType>::Terminate()
{
CriticalSectionScoped cs(_crit);
- assert(_createdMemory == _outstandingMemory + _memoryPool.GetSize());
+ assert(_createdMemory == _outstandingMemory + _memoryPool.size());
_terminate = true;
// Reclaim all memory.
while(_createdMemory > 0)
{
- ListItem* item = _memoryPool.First();
- if(item == NULL)
- {
- // There is memory that hasn't been returned yet.
- return -1;
- }
- MemoryType* memory = static_cast<MemoryType*>(item->GetItem());
+ MemoryType* memory = _memoryPool.front();
+ _memoryPool.pop_front();
delete memory;
- _memoryPool.Erase(item);
_createdMemory--;
}
return 0;
@@ -158,7 +148,7 @@ int32_t MemoryPoolImpl<MemoryType>::CreateMemory(
{
return -1;
}
- _memoryPool.PushBack(static_cast<void*>(memory));
+ _memoryPool.push_back(memory);
_createdMemory++;
}
return 0;
diff --git a/chromium/third_party/webrtc/modules/audio_device/Android.mk b/chromium/third_party/webrtc/modules/audio_device/Android.mk
index affa5e1c2d4..4b3b9124d7b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/Android.mk
+++ b/chromium/third_party/webrtc/modules/audio_device/Android.mk
@@ -25,7 +25,8 @@ LOCAL_SRC_FILES := \
android/audio_device_android_opensles.cc \
android/audio_device_utility_android.cc \
dummy/audio_device_utility_dummy.cc \
- dummy/audio_device_dummy.cc
+ dummy/audio_device_dummy.cc \
+ dummy/file_audio_device.cc
# Flags passed to both C and C++ files.
LOCAL_CFLAGS := \
diff --git a/chromium/third_party/webrtc/modules/audio_device/OWNERS b/chromium/third_party/webrtc/modules/audio_device/OWNERS
index a07ced37b30..7bb3cd52378 100644
--- a/chromium/third_party/webrtc/modules/audio_device/OWNERS
+++ b/chromium/third_party/webrtc/modules/audio_device/OWNERS
@@ -2,3 +2,10 @@ henrikg@webrtc.org
henrika@webrtc.org
niklas.enbom@webrtc.org
xians@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/audio_device_template.h b/chromium/third_party/webrtc/modules/audio_device/android/audio_device_template.h
index 28112698259..f851f4703d2 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/audio_device_template.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/audio_device_template.h
@@ -30,6 +30,12 @@ class AudioDeviceTemplate : public AudioDeviceGeneric {
}
return InputType::SetAndroidAudioDeviceObjects(javaVM, env, context);
}
+
+ static void ClearAndroidAudioDeviceObjects() {
+ OutputType::ClearAndroidAudioDeviceObjects();
+ InputType::ClearAndroidAudioDeviceObjects();
+ }
+
explicit AudioDeviceTemplate(const int32_t id)
: output_(id),
input_(id, &output_) {
@@ -169,11 +175,6 @@ class AudioDeviceTemplate : public AudioDeviceGeneric {
return -1;
}
- int32_t SpeakerIsAvailable(
- bool& available) { // NOLINT
- return output_.SpeakerIsAvailable(available);
- }
-
int32_t InitSpeaker() {
return output_.InitSpeaker();
}
@@ -182,11 +183,6 @@ class AudioDeviceTemplate : public AudioDeviceGeneric {
return output_.SpeakerIsInitialized();
}
- int32_t MicrophoneIsAvailable(
- bool& available) { // NOLINT
- return input_.MicrophoneIsAvailable(available);
- }
-
int32_t InitMicrophone() {
return input_.InitMicrophone();
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer.h b/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer.h
index 597b8aaa389..e577b72fd4b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer.h
@@ -56,7 +56,7 @@ class FineAudioBuffer {
int bytes_per_10_ms_;
// Storage for samples that are not yet asked for.
- scoped_array<int8_t> cache_buffer_;
+ scoped_ptr<int8_t[]> cache_buffer_;
int cached_buffer_start_; // Location of first unread sample.
int cached_bytes_; // Number of bytes stored in cache.
};
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer_unittest.cc
index 69ba741d18a..e1f03f8f3c3 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/fine_audio_buffer_unittest.cc
@@ -80,7 +80,7 @@ void RunFineBufferTest(int sample_rate, int frame_size_in_samples) {
FineAudioBuffer fine_buffer(&audio_device_buffer, kFrameSizeBytes,
sample_rate);
- scoped_array<int8_t> out_buffer;
+ scoped_ptr<int8_t[]> out_buffer;
out_buffer.reset(
new int8_t[fine_buffer.RequiredBufferSizeBytes()]);
for (int i = 0; i < kNumberOfFrames; ++i) {
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.cc b/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.cc
index e276fcc5230..f22d8bf7ef8 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.cc
@@ -75,6 +75,9 @@ int32_t OpenSlesInput::SetAndroidAudioDeviceObjects(void* javaVM,
return 0;
}
+void OpenSlesInput::ClearAndroidAudioDeviceObjects() {
+}
+
int32_t OpenSlesInput::Init() {
assert(!initialized_);
@@ -177,11 +180,6 @@ int32_t OpenSlesInput::SetAGC(bool enable) {
return 0;
}
-int32_t OpenSlesInput::MicrophoneIsAvailable(bool& available) { // NOLINT
- available = true;
- return 0;
-}
-
int32_t OpenSlesInput::InitMicrophone() {
assert(initialized_);
assert(!recording_);
@@ -291,7 +289,7 @@ void OpenSlesInput::AllocateBuffers() {
fifo_.reset(new SingleRwFifo(num_fifo_buffers_needed_));
// Allocate the memory area to be used.
- rec_buf_.reset(new scoped_array<int8_t>[TotalBuffersUsed()]);
+ rec_buf_.reset(new scoped_ptr<int8_t[]>[TotalBuffersUsed()]);
for (int i = 0; i < TotalBuffersUsed(); ++i) {
rec_buf_[i].reset(new int8_t[buffer_size_bytes()]);
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.h b/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.h
index ea5144b70fe..d27d82435db 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/opensles_input.h
@@ -41,6 +41,7 @@ class OpenSlesInput {
static int32_t SetAndroidAudioDeviceObjects(void* javaVM,
void* env,
void* context);
+ static void ClearAndroidAudioDeviceObjects();
// Main initializaton and termination
int32_t Init();
@@ -76,7 +77,6 @@ class OpenSlesInput {
bool AGC() const { return agc_enabled_; }
// Audio mixer initialization
- int32_t MicrophoneIsAvailable(bool& available); // NOLINT
int32_t InitMicrophone();
bool MicrophoneIsInitialized() const { return mic_initialized_; }
@@ -205,7 +205,7 @@ class OpenSlesInput {
// Audio buffers
AudioDeviceBuffer* audio_buffer_;
// Holds all allocated memory such that it is deallocated properly.
- scoped_array<scoped_array<int8_t> > rec_buf_;
+ scoped_ptr<scoped_ptr<int8_t[]>[]> rec_buf_;
// Index in |rec_buf_| pointing to the audio buffer that will be ready the
// next time RecorderSimpleBufferQueueCallbackHandler is invoked.
// Ready means buffer contains audio data from the device.
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.cc b/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.cc
index 3823305532d..377789b2371 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.cc
@@ -76,6 +76,10 @@ int32_t OpenSlesOutput::SetAndroidAudioDeviceObjects(void* javaVM,
return 0;
}
+void OpenSlesOutput::ClearAndroidAudioDeviceObjects() {
+ AudioManagerJni::ClearAndroidAudioDeviceObjects();
+}
+
int32_t OpenSlesOutput::Init() {
assert(!initialized_);
@@ -184,11 +188,6 @@ int32_t OpenSlesOutput::StopPlayout() {
return 0;
}
-int32_t OpenSlesOutput::SpeakerIsAvailable(bool& available) { // NOLINT
- available = true;
- return 0;
-}
-
int32_t OpenSlesOutput::InitSpeaker() {
assert(!playing_);
speaker_initialized_ = true;
@@ -341,7 +340,7 @@ void OpenSlesOutput::AllocateBuffers() {
fifo_.reset(new SingleRwFifo(num_fifo_buffers_needed_));
// Allocate the memory area to be used.
- play_buf_.reset(new scoped_array<int8_t>[TotalBuffersUsed()]);
+ play_buf_.reset(new scoped_ptr<int8_t[]>[TotalBuffersUsed()]);
int required_buffer_size = fine_buffer_->RequiredBufferSizeBytes();
for (int i = 0; i < TotalBuffersUsed(); ++i) {
play_buf_[i].reset(new int8_t[required_buffer_size]);
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.h b/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.h
index 38ca969a019..aa9b5bf1213 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/opensles_output.h
@@ -41,6 +41,7 @@ class OpenSlesOutput : public PlayoutDelayProvider {
static int32_t SetAndroidAudioDeviceObjects(void* javaVM,
void* env,
void* context);
+ static void ClearAndroidAudioDeviceObjects();
// Main initializaton and termination
int32_t Init();
@@ -73,7 +74,6 @@ class OpenSlesOutput : public PlayoutDelayProvider {
bool Playing() const { return playing_; }
// Audio mixer initialization
- int32_t SpeakerIsAvailable(bool& available); // NOLINT
int32_t InitSpeaker();
bool SpeakerIsInitialized() const { return speaker_initialized_; }
@@ -223,7 +223,7 @@ class OpenSlesOutput : public PlayoutDelayProvider {
// Audio buffers
AudioDeviceBuffer* audio_buffer_;
scoped_ptr<FineAudioBuffer> fine_buffer_;
- scoped_array<scoped_array<int8_t> > play_buf_;
+ scoped_ptr<scoped_ptr<int8_t[]>[]> play_buf_;
// Index in |rec_buf_| pointing to the audio buffer that will be ready the
// next time PlayerSimpleBufferQueueCallbackHandler is invoked.
// Ready means buffer is ready to be played out to device.
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.cc b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.cc
index d65ab9fbb63..73d4d61dd3f 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/audio_device/android/single_rw_fifo.h"
+#include <assert.h>
+
static int UpdatePos(int pos, int capacity) {
return (pos + 1) % capacity;
}
@@ -18,14 +20,16 @@ namespace webrtc {
namespace subtle {
-#if defined(__ARMEL__)
+#if defined(__aarch64__)
+// From http://http://src.chromium.org/viewvc/chrome/trunk/src/base/atomicops_internals_arm64_gcc.h
+inline void MemoryBarrier() {
+ __asm__ __volatile__ ("dmb ish" ::: "memory");
+}
+
+#elif defined(__ARMEL__)
// From http://src.chromium.org/viewvc/chrome/trunk/src/base/atomicops_internals_arm_gcc.h
-// Note that it is only the MemoryBarrier function that makes this class arm
-// specific. Borrowing other MemoryBarrier implementations, this class could
-// be extended to more platforms.
inline void MemoryBarrier() {
- // Note: This is a function call, which is also an implicit compiler
- // barrier.
+ // Note: This is a function call, which is also an implicit compiler barrier.
typedef void (*KernelMemoryBarrierFunc)();
((KernelMemoryBarrierFunc)0xffff0fa0)();
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.h b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.h
index a1fcfaab417..092b1d5e090 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.h
+++ b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo.h
@@ -35,7 +35,7 @@ class SingleRwFifo {
int capacity() const { return capacity_; }
private:
- scoped_array<int8_t*> queue_;
+ scoped_ptr<int8_t*[]> queue_;
int capacity_;
Atomic32 size_;
diff --git a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo_unittest.cc b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo_unittest.cc
index c722c2756cd..9925baaa88f 100644
--- a/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/android/single_rw_fifo_unittest.cc
@@ -90,7 +90,7 @@ class SingleRwFifoTest : public testing::Test {
protected:
SingleRwFifo fifo_;
// Memory area for proper de-allocation.
- scoped_array<int8_t> buffer_[kCapacity];
+ scoped_ptr<int8_t[]> buffer_[kCapacity];
std::list<int8_t*> memory_queue_;
int pushed_;
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi b/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
index 1e14747f3df..a64856b5da5 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device.gypi
@@ -20,7 +20,7 @@
'.',
'../interface',
'include',
- 'dummy', # dummy audio device
+ 'dummy', # Contains dummy audio device implementations.
],
'direct_dependent_settings': {
'include_dirs': [
@@ -45,6 +45,8 @@
'dummy/audio_device_dummy.h',
'dummy/audio_device_utility_dummy.cc',
'dummy/audio_device_utility_dummy.h',
+ 'dummy/file_audio_device.cc',
+ 'dummy/file_audio_device.h',
],
'conditions': [
['OS=="linux"', {
@@ -77,6 +79,13 @@
'WEBRTC_DUMMY_AUDIO_BUILD',
],
}],
+ ['build_with_chromium==0', {
+ 'sources': [
+ # Don't link these into Chrome since they contain static data.
+ 'dummy/file_audio_device_factory.cc',
+ 'dummy/file_audio_device_factory.h',
+ ],
+ }],
['include_internal_audio_device==1', {
'sources': [
'linux/alsasymboltable_linux.cc',
@@ -246,66 +255,6 @@
},
],
}],
- ['OS=="android"', {
- 'targets': [
- {
- 'target_name': 'libopensl-demo-jni',
- 'type': 'loadable_module',
- 'dependencies': [
- 'audio_device',
- ],
- 'sources': [
- 'android/test/jni/opensl_runner.cc',
- 'android/test/fake_audio_device_buffer.cc',
- ],
- 'link_settings': {
- 'libraries': [
- '-llog',
- '-lOpenSLES',
- ],
- },
- },
- {
- 'target_name': 'OpenSlDemo',
- 'type': 'none',
- 'dependencies': [
- 'libopensl-demo-jni',
- '<(modules_java_gyp_path):*',
- ],
- 'actions': [
- {
- # TODO(henrik): Convert building of the demo to a proper GYP
- # target so this action is not needed once chromium's
- # apk-building machinery can be used. (crbug.com/225101)
- 'action_name': 'build_opensldemo_apk',
- 'variables': {
- 'android_opensl_demo_root': '<(webrtc_root)/modules/audio_device/android/test',
- },
- 'inputs' : [
- '<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
- '<(PRODUCT_DIR)/libopensl-demo-jni.so',
- '<!@(find <(android_opensl_demo_root)/src -name "*.java")',
- '<!@(find <(android_opensl_demo_root)/res -name "*.xml")',
- '<!@(find <(android_opensl_demo_root)/res -name "*.png")',
- '<(android_opensl_demo_root)/AndroidManifest.xml',
- '<(android_opensl_demo_root)/build.xml',
- '<(android_opensl_demo_root)/project.properties',
- ],
- 'outputs': ['<(PRODUCT_DIR)/OpenSlDemo-debug.apk'],
- 'action': ['bash', '-ec',
- 'rm -f <(_outputs) && '
- 'mkdir -p <(android_opensl_demo_root)/libs/<(android_app_abi) && '
- '<(android_strip) -o <(android_opensl_demo_root)/libs/<(android_app_abi)/libopensl-demo-jni.so <(PRODUCT_DIR)/libopensl-demo-jni.so && '
- 'cp <(PRODUCT_DIR)/lib.java/audio_device_module_java.jar <(android_opensl_demo_root)/libs/ &&'
- 'cd <(android_opensl_demo_root) && '
- 'ant debug && '
- 'cd - && '
- 'cp <(android_opensl_demo_root)/bin/OpenSlDemo-debug.apk <(_outputs)'
- ],
- },
- ],
- }],
- }],
['OS=="android" and enable_android_opensl==1', {
'targets': [
{
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_buffer.cc b/chromium/third_party/webrtc/modules/audio_device/audio_device_buffer.cc
index db5cc322f98..42fdaad22cb 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_buffer.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_buffer.cc
@@ -548,13 +548,16 @@ int32_t AudioDeviceBuffer::RequestPlayoutData(uint32_t nSamples)
if (_ptrCbAudioTransport)
{
uint32_t res(0);
-
+ int64_t elapsed_time_ms = -1;
+ int64_t ntp_time_ms = -1;
res = _ptrCbAudioTransport->NeedMorePlayData(_playSamples,
playBytesPerSample,
playChannels,
playSampleRate,
&_playBuffer[0],
- nSamplesOut);
+ nSamplesOut,
+ &elapsed_time_ms,
+ &ntp_time_ms);
if (res != 0)
{
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "NeedMorePlayData() failed");
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_generic.h b/chromium/third_party/webrtc/modules/audio_device/audio_device_generic.h
index 8038028029b..a4c320eaf75 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_generic.h
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_generic.h
@@ -76,10 +76,8 @@ class AudioDeviceGeneric
uint16_t& volumeRight) const = 0;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available) = 0;
virtual int32_t InitSpeaker() = 0;
virtual bool SpeakerIsInitialized() const = 0;
- virtual int32_t MicrophoneIsAvailable(bool& available) = 0;
virtual int32_t InitMicrophone() = 0;
virtual bool MicrophoneIsInitialized() const = 0;
@@ -137,7 +135,7 @@ class AudioDeviceGeneric
// CPU load
virtual int32_t CPULoad(uint16_t& load) const = 0;
-
+
// Native sample rate controls (samples/sec)
virtual int32_t SetRecordingSampleRate(
const uint32_t samplesPerSec);
@@ -147,7 +145,7 @@ class AudioDeviceGeneric
// Speaker audio routing (for mobile devices)
virtual int32_t SetLoudspeakerStatus(bool enable);
virtual int32_t GetLoudspeakerStatus(bool& enable) const;
-
+
// Reset Audio Device (for mobile devices)
virtual int32_t ResetAudioDevice();
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
index 05201c0e2a5..58411e3b94d 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.cc
@@ -45,8 +45,14 @@
#include "audio_device_utility_mac.h"
#include "audio_device_mac.h"
#endif
+
+#if defined(WEBRTC_DUMMY_FILE_DEVICES)
+#include "webrtc/modules/audio_device/dummy/file_audio_device_factory.h"
+#endif
+
#include "webrtc/modules/audio_device/dummy/audio_device_dummy.h"
#include "webrtc/modules/audio_device/dummy/audio_device_utility_dummy.h"
+#include "webrtc/modules/audio_device/dummy/file_audio_device.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
@@ -203,6 +209,14 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects()
{
ptrAudioDeviceUtility = new AudioDeviceUtilityDummy(Id());
}
+#elif defined(WEBRTC_DUMMY_FILE_DEVICES)
+ ptrAudioDevice = FileAudioDeviceFactory::CreateFileAudioDevice(Id());
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "Will use file-playing dummy device.");
+ if (ptrAudioDevice != NULL)
+ {
+ ptrAudioDeviceUtility = new AudioDeviceUtilityDummy(Id());
+ }
#else
const AudioLayer audioLayer(PlatformAudioLayer());
@@ -641,27 +655,6 @@ bool AudioDeviceModuleImpl::Initialized() const
}
// ----------------------------------------------------------------------------
-// SpeakerIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceModuleImpl::SpeakerIsAvailable(bool* available)
-{
- CHECK_INITIALIZED();
-
- bool isAvailable(0);
-
- if (_ptrAudioDevice->SpeakerIsAvailable(isAvailable) == -1)
- {
- return -1;
- }
-
- *available = isAvailable;
-
- WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", available);
- return (0);
-}
-
-// ----------------------------------------------------------------------------
// InitSpeaker
// ----------------------------------------------------------------------------
@@ -672,27 +665,6 @@ int32_t AudioDeviceModuleImpl::InitSpeaker()
}
// ----------------------------------------------------------------------------
-// MicrophoneIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceModuleImpl::MicrophoneIsAvailable(bool* available)
-{
- CHECK_INITIALIZED();
-
- bool isAvailable(0);
-
- if (_ptrAudioDevice->MicrophoneIsAvailable(isAvailable) == -1)
- {
- return -1;
- }
-
- *available = isAvailable;
-
- WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
- return (0);
-}
-
-// ----------------------------------------------------------------------------
// InitMicrophone
// ----------------------------------------------------------------------------
@@ -1750,8 +1722,6 @@ int32_t AudioDeviceModuleImpl::StopRawOutputFileRecording()
CHECK_INITIALIZED();
return (_audioDeviceBuffer.StopOutputFileRecording());
-
- return 0;
}
// ----------------------------------------------------------------------------
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
index 26bd54399b2..a545d580f4b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_impl.h
@@ -115,10 +115,8 @@ public:
uint16_t* volumeRight) const OVERRIDE;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool* available) OVERRIDE;
virtual int32_t InitSpeaker() OVERRIDE;
virtual bool SpeakerIsInitialized() const OVERRIDE;
- virtual int32_t MicrophoneIsAvailable(bool* available) OVERRIDE;
virtual int32_t InitMicrophone() OVERRIDE;
virtual bool MicrophoneIsInitialized() const OVERRIDE;
diff --git a/chromium/third_party/webrtc/modules/audio_device/audio_device_tests.isolate b/chromium/third_party/webrtc/modules/audio_device/audio_device_tests.isolate
index 69e877c14f1..ebe8bfb40b9 100644
--- a/chromium/third_party/webrtc/modules/audio_device/audio_device_tests.isolate
+++ b/chromium/third_party/webrtc/modules/audio_device/audio_device_tests.isolate
@@ -8,27 +8,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../../data/',
- '../../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/audio_device_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/audio_device_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.cc b/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.cc
index 379b25766d8..92199f6d295 100644
--- a/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.cc
@@ -91,14 +91,10 @@ int32_t AudioDeviceDummy::WaveOutVolume(uint16_t& volumeLeft,
return -1;
}
-int32_t AudioDeviceDummy::SpeakerIsAvailable(bool& available) { return -1; }
-
int32_t AudioDeviceDummy::InitSpeaker() { return -1; }
bool AudioDeviceDummy::SpeakerIsInitialized() const { return false; }
-int32_t AudioDeviceDummy::MicrophoneIsAvailable(bool& available) { return -1; }
-
int32_t AudioDeviceDummy::InitMicrophone() { return -1; }
bool AudioDeviceDummy::MicrophoneIsInitialized() const { return false; }
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.h b/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.h
index 47d7aab79e4..41e52e95443 100644
--- a/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.h
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/audio_device_dummy.h
@@ -76,10 +76,8 @@ class AudioDeviceDummy : public AudioDeviceGeneric {
uint16_t& volumeRight) const OVERRIDE;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitSpeaker() OVERRIDE;
virtual bool SpeakerIsInitialized() const OVERRIDE;
- virtual int32_t MicrophoneIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitMicrophone() OVERRIDE;
virtual bool MicrophoneIsInitialized() const OVERRIDE;
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.cc b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.cc
new file mode 100644
index 00000000000..e7771c66de8
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.cc
@@ -0,0 +1,586 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <iostream>
+#include "webrtc/modules/audio_device/dummy/file_audio_device.h"
+#include "webrtc/system_wrappers/interface/sleep.h"
+#include "webrtc/system_wrappers/interface/thread_wrapper.h"
+
+namespace webrtc {
+
+int kRecordingFixedSampleRate = 48000;
+int kRecordingNumChannels = 2;
+int kPlayoutFixedSampleRate = 48000;
+int kPlayoutNumChannels = 2;
+int kPlayoutBufferSize = kPlayoutFixedSampleRate / 100
+ * kPlayoutNumChannels * 2;
+int kRecordingBufferSize = kRecordingFixedSampleRate / 100
+ * kRecordingNumChannels * 2;
+
+FileAudioDevice::FileAudioDevice(const int32_t id,
+ const char* inputFilename,
+ const char* outputFile):
+ _ptrAudioBuffer(NULL),
+ _recordingBuffer(NULL),
+ _playoutBuffer(NULL),
+ _recordingFramesLeft(0),
+ _playoutFramesLeft(0),
+ _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+ _recordingBufferSizeIn10MS(0),
+ _recordingFramesIn10MS(0),
+ _playoutFramesIn10MS(0),
+ _ptrThreadRec(NULL),
+ _ptrThreadPlay(NULL),
+ _recThreadID(0),
+ _playThreadID(0),
+ _playing(false),
+ _recording(false),
+ _lastCallPlayoutMillis(0),
+ _lastCallRecordMillis(0),
+ _outputFile(*FileWrapper::Create()),
+ _inputFile(*FileWrapper::Create()),
+ _outputFilename(outputFile),
+ _inputFilename(inputFilename),
+ _clock(Clock::GetRealTimeClock()) {
+}
+
+FileAudioDevice::~FileAudioDevice() {
+ _outputFile.Flush();
+ _outputFile.CloseFile();
+ delete &_outputFile;
+ _inputFile.Flush();
+ _inputFile.CloseFile();
+ delete &_inputFile;
+}
+
+int32_t FileAudioDevice::ActiveAudioLayer(
+ AudioDeviceModule::AudioLayer& audioLayer) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::Init() { return 0; }
+
+int32_t FileAudioDevice::Terminate() { return 0; }
+
+bool FileAudioDevice::Initialized() const { return true; }
+
+int16_t FileAudioDevice::PlayoutDevices() {
+ return 1;
+}
+
+int16_t FileAudioDevice::RecordingDevices() {
+ return 1;
+}
+
+int32_t FileAudioDevice::PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ const char* kName = "dummy_device";
+ const char* kGuid = "dummy_device_unique_id";
+ if (index < 1) {
+ memset(name, 0, kAdmMaxDeviceNameSize);
+ memset(guid, 0, kAdmMaxGuidSize);
+ memcpy(name, kName, strlen(kName));
+    memcpy(guid, kGuid, strlen(kGuid));
+ return 0;
+ }
+ return -1;
+}
+
+int32_t FileAudioDevice::RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) {
+ const char* kName = "dummy_device";
+ const char* kGuid = "dummy_device_unique_id";
+ if (index < 1) {
+ memset(name, 0, kAdmMaxDeviceNameSize);
+ memset(guid, 0, kAdmMaxGuidSize);
+ memcpy(name, kName, strlen(kName));
+    memcpy(guid, kGuid, strlen(kGuid));
+ return 0;
+ }
+ return -1;
+}
+
+int32_t FileAudioDevice::SetPlayoutDevice(uint16_t index) {
+ if (index == 0) {
+ _playout_index = index;
+ return 0;
+ }
+ return -1;
+}
+
+int32_t FileAudioDevice::SetPlayoutDevice(
+ AudioDeviceModule::WindowsDeviceType device) {
+ return -1;
+}
+
+int32_t FileAudioDevice::SetRecordingDevice(uint16_t index) {
+ if (index == 0) {
+ _record_index = index;
+ return _record_index;
+ }
+ return -1;
+}
+
+int32_t FileAudioDevice::SetRecordingDevice(
+ AudioDeviceModule::WindowsDeviceType device) {
+ return -1;
+}
+
+int32_t FileAudioDevice::PlayoutIsAvailable(bool& available) {
+ if (_playout_index == 0) {
+ available = true;
+ return _playout_index;
+ }
+ available = false;
+ return -1;
+}
+
+int32_t FileAudioDevice::InitPlayout() {
+ if (_ptrAudioBuffer)
+ {
+ // Update webrtc audio buffer with the selected parameters
+ _ptrAudioBuffer->SetPlayoutSampleRate(kPlayoutFixedSampleRate);
+ _ptrAudioBuffer->SetPlayoutChannels(kPlayoutNumChannels);
+ }
+ return 0;
+}
+
+bool FileAudioDevice::PlayoutIsInitialized() const {
+ return true;
+}
+
+int32_t FileAudioDevice::RecordingIsAvailable(bool& available) {
+ if (_record_index == 0) {
+ available = true;
+ return _record_index;
+ }
+ available = false;
+ return -1;
+}
+
+int32_t FileAudioDevice::InitRecording() {
+ CriticalSectionScoped lock(&_critSect);
+
+ if (_recording) {
+ return -1;
+ }
+
+ _recordingFramesIn10MS = kRecordingFixedSampleRate/100;
+
+ if (_ptrAudioBuffer) {
+ _ptrAudioBuffer->SetRecordingSampleRate(kRecordingFixedSampleRate);
+ _ptrAudioBuffer->SetRecordingChannels(kRecordingNumChannels);
+ }
+ return 0;
+}
+
+bool FileAudioDevice::RecordingIsInitialized() const {
+ return true;
+}
+
+int32_t FileAudioDevice::StartPlayout() {
+ if (_playing)
+ {
+ return 0;
+ }
+
+ _playing = true;
+ _playoutFramesLeft = 0;
+
+ if (!_playoutBuffer)
+ _playoutBuffer = new int8_t[2 *
+ kPlayoutNumChannels *
+ kPlayoutFixedSampleRate/100];
+ if (!_playoutBuffer)
+ {
+ _playing = false;
+ return -1;
+ }
+
+ // PLAYOUT
+ const char* threadName = "webrtc_audio_module_play_thread";
+ _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc,
+ this,
+ kRealtimePriority,
+ threadName);
+ if (_ptrThreadPlay == NULL)
+ {
+ _playing = false;
+ delete [] _playoutBuffer;
+ _playoutBuffer = NULL;
+ return -1;
+ }
+
+ if (_outputFile.OpenFile(_outputFilename.c_str(),
+ false, false, false) == -1) {
+ printf("Failed to open playout file %s!", _outputFilename.c_str());
+ _playing = false;
+ delete [] _playoutBuffer;
+ _playoutBuffer = NULL;
+ return -1;
+ }
+
+ unsigned int threadID(0);
+ if (!_ptrThreadPlay->Start(threadID))
+ {
+ _playing = false;
+ delete _ptrThreadPlay;
+ _ptrThreadPlay = NULL;
+ delete [] _playoutBuffer;
+ _playoutBuffer = NULL;
+ return -1;
+ }
+ _playThreadID = threadID;
+
+ return 0;
+}
+
+int32_t FileAudioDevice::StopPlayout() {
+ {
+ CriticalSectionScoped lock(&_critSect);
+ _playing = false;
+ }
+
+ // stop playout thread first
+ if (_ptrThreadPlay && !_ptrThreadPlay->Stop())
+ {
+ return -1;
+ }
+ else {
+ delete _ptrThreadPlay;
+ _ptrThreadPlay = NULL;
+ }
+
+ CriticalSectionScoped lock(&_critSect);
+
+ _playoutFramesLeft = 0;
+ delete [] _playoutBuffer;
+ _playoutBuffer = NULL;
+ _outputFile.Flush();
+ _outputFile.CloseFile();
+ return 0;
+}
+
+bool FileAudioDevice::Playing() const {
+ return true;
+}
+
+int32_t FileAudioDevice::StartRecording() {
+ _recording = true;
+
+ // Make sure we only create the buffer once.
+ _recordingBufferSizeIn10MS = _recordingFramesIn10MS *
+ kRecordingNumChannels *
+ 2;
+ if (!_recordingBuffer) {
+ _recordingBuffer = new int8_t[_recordingBufferSizeIn10MS];
+ }
+
+ if (_inputFile.OpenFile(_inputFilename.c_str(), true,
+ true, false) == -1) {
+ printf("Failed to open audio input file %s!\n",
+ _inputFilename.c_str());
+ _recording = false;
+ delete[] _recordingBuffer;
+ _recordingBuffer = NULL;
+ return -1;
+ }
+
+ const char* threadName = "webrtc_audio_module_capture_thread";
+ _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc,
+ this,
+ kRealtimePriority,
+ threadName);
+ if (_ptrThreadRec == NULL)
+ {
+ _recording = false;
+ delete [] _recordingBuffer;
+ _recordingBuffer = NULL;
+ return -1;
+ }
+
+ unsigned int threadID(0);
+ if (!_ptrThreadRec->Start(threadID))
+ {
+ _recording = false;
+ delete _ptrThreadRec;
+ _ptrThreadRec = NULL;
+ delete [] _recordingBuffer;
+ _recordingBuffer = NULL;
+ return -1;
+ }
+ _recThreadID = threadID;
+
+ return 0;
+}
+
+
+int32_t FileAudioDevice::StopRecording() {
+ {
+ CriticalSectionScoped lock(&_critSect);
+ _recording = false;
+ }
+
+ if (_ptrThreadRec && !_ptrThreadRec->Stop())
+ {
+ return -1;
+ }
+ else {
+ delete _ptrThreadRec;
+ _ptrThreadRec = NULL;
+ }
+
+ CriticalSectionScoped lock(&_critSect);
+ _recordingFramesLeft = 0;
+ if (_recordingBuffer)
+ {
+ delete [] _recordingBuffer;
+ _recordingBuffer = NULL;
+ }
+ return 0;
+}
+
+bool FileAudioDevice::Recording() const {
+ return _recording;
+}
+
+int32_t FileAudioDevice::SetAGC(bool enable) { return -1; }
+
+bool FileAudioDevice::AGC() const { return false; }
+
+int32_t FileAudioDevice::SetWaveOutVolume(uint16_t volumeLeft,
+ uint16_t volumeRight) {
+ return -1;
+}
+
+int32_t FileAudioDevice::WaveOutVolume(uint16_t& volumeLeft,
+ uint16_t& volumeRight) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::InitSpeaker() { return -1; }
+
+bool FileAudioDevice::SpeakerIsInitialized() const { return false; }
+
+int32_t FileAudioDevice::InitMicrophone() { return 0; }
+
+bool FileAudioDevice::MicrophoneIsInitialized() const { return true; }
+
+int32_t FileAudioDevice::SpeakerVolumeIsAvailable(bool& available) {
+ return -1;
+}
+
+int32_t FileAudioDevice::SetSpeakerVolume(uint32_t volume) { return -1; }
+
+int32_t FileAudioDevice::SpeakerVolume(uint32_t& volume) const { return -1; }
+
+int32_t FileAudioDevice::MaxSpeakerVolume(uint32_t& maxVolume) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::MinSpeakerVolume(uint32_t& minVolume) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::SpeakerVolumeStepSize(uint16_t& stepSize) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::MicrophoneVolumeIsAvailable(bool& available) {
+ return -1;
+}
+
+int32_t FileAudioDevice::SetMicrophoneVolume(uint32_t volume) { return -1; }
+
+int32_t FileAudioDevice::MicrophoneVolume(uint32_t& volume) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::MaxMicrophoneVolume(uint32_t& maxVolume) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::MinMicrophoneVolume(uint32_t& minVolume) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::MicrophoneVolumeStepSize(uint16_t& stepSize) const {
+ return -1;
+}
+
+int32_t FileAudioDevice::SpeakerMuteIsAvailable(bool& available) { return -1; }
+
+int32_t FileAudioDevice::SetSpeakerMute(bool enable) { return -1; }
+
+int32_t FileAudioDevice::SpeakerMute(bool& enabled) const { return -1; }
+
+int32_t FileAudioDevice::MicrophoneMuteIsAvailable(bool& available) {
+ return -1;
+}
+
+int32_t FileAudioDevice::SetMicrophoneMute(bool enable) { return -1; }
+
+int32_t FileAudioDevice::MicrophoneMute(bool& enabled) const { return -1; }
+
+int32_t FileAudioDevice::MicrophoneBoostIsAvailable(bool& available) {
+ return -1;
+}
+
+int32_t FileAudioDevice::SetMicrophoneBoost(bool enable) { return -1; }
+
+int32_t FileAudioDevice::MicrophoneBoost(bool& enabled) const { return -1; }
+
+int32_t FileAudioDevice::StereoPlayoutIsAvailable(bool& available) {
+ available = true;
+ return 0;
+}
+int32_t FileAudioDevice::SetStereoPlayout(bool enable) {
+ return 0;
+}
+
+int32_t FileAudioDevice::StereoPlayout(bool& enabled) const {
+ enabled = true;
+ return 0;
+}
+
+int32_t FileAudioDevice::StereoRecordingIsAvailable(bool& available) {
+ available = true;
+ return 0;
+}
+
+int32_t FileAudioDevice::SetStereoRecording(bool enable) {
+ return 0;
+}
+
+int32_t FileAudioDevice::StereoRecording(bool& enabled) const {
+ enabled = true;
+ return 0;
+}
+
+int32_t FileAudioDevice::SetPlayoutBuffer(
+ const AudioDeviceModule::BufferType type,
+ uint16_t sizeMS) {
+ return 0;
+}
+
+int32_t FileAudioDevice::PlayoutBuffer(AudioDeviceModule::BufferType& type,
+ uint16_t& sizeMS) const {
+ type = _playBufType;
+ return 0;
+}
+
+int32_t FileAudioDevice::PlayoutDelay(uint16_t& delayMS) const {
+ return 0;
+}
+
+int32_t FileAudioDevice::RecordingDelay(uint16_t& delayMS) const { return -1; }
+
+int32_t FileAudioDevice::CPULoad(uint16_t& load) const { return -1; }
+
+bool FileAudioDevice::PlayoutWarning() const { return false; }
+
+bool FileAudioDevice::PlayoutError() const { return false; }
+
+bool FileAudioDevice::RecordingWarning() const { return false; }
+
+bool FileAudioDevice::RecordingError() const { return false; }
+
+void FileAudioDevice::ClearPlayoutWarning() {}
+
+void FileAudioDevice::ClearPlayoutError() {}
+
+void FileAudioDevice::ClearRecordingWarning() {}
+
+void FileAudioDevice::ClearRecordingError() {}
+
+void FileAudioDevice::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
+ CriticalSectionScoped lock(&_critSect);
+
+ _ptrAudioBuffer = audioBuffer;
+
+ // Inform the AudioBuffer about default settings for this implementation.
+ // Set all values to zero here since the actual settings will be done by
+ // InitPlayout and InitRecording later.
+ _ptrAudioBuffer->SetRecordingSampleRate(0);
+ _ptrAudioBuffer->SetPlayoutSampleRate(0);
+ _ptrAudioBuffer->SetRecordingChannels(0);
+ _ptrAudioBuffer->SetPlayoutChannels(0);
+}
+
+bool FileAudioDevice::PlayThreadFunc(void* pThis)
+{
+ return (static_cast<FileAudioDevice*>(pThis)->PlayThreadProcess());
+}
+
+bool FileAudioDevice::RecThreadFunc(void* pThis)
+{
+ return (static_cast<FileAudioDevice*>(pThis)->RecThreadProcess());
+}
+
+bool FileAudioDevice::PlayThreadProcess()
+{
+ if(!_playing)
+ return false;
+
+ uint64_t currentTime = _clock->CurrentNtpInMilliseconds();
+ _critSect.Enter();
+
+ if (_lastCallPlayoutMillis == 0 ||
+ currentTime - _lastCallPlayoutMillis >= 10)
+ {
+ _critSect.Leave();
+ _ptrAudioBuffer->RequestPlayoutData(_playoutFramesIn10MS);
+ _critSect.Enter();
+
+ _playoutFramesLeft = _ptrAudioBuffer->GetPlayoutData(_playoutBuffer);
+ assert(_playoutFramesLeft == _playoutFramesIn10MS);
+ if (_outputFile.Open()) {
+ _outputFile.Write(_playoutBuffer, kPlayoutBufferSize);
+ _outputFile.Flush();
+ }
+ _lastCallPlayoutMillis = currentTime;
+ }
+ _playoutFramesLeft = 0;
+ _critSect.Leave();
+ SleepMs(10 - (_clock->CurrentNtpInMilliseconds() - currentTime));
+ return true;
+}
+
+bool FileAudioDevice::RecThreadProcess()
+{
+ if (!_recording)
+ return false;
+
+ uint64_t currentTime = _clock->CurrentNtpInMilliseconds();
+ _critSect.Enter();
+
+ if (_lastCallRecordMillis == 0 ||
+ currentTime - _lastCallRecordMillis >= 10) {
+ if (_inputFile.Open()) {
+ if (_inputFile.Read(_recordingBuffer, kRecordingBufferSize) > 0) {
+ _ptrAudioBuffer->SetRecordedBuffer(_recordingBuffer,
+ _recordingFramesIn10MS);
+ } else {
+ _inputFile.Rewind();
+ }
+ _lastCallRecordMillis = currentTime;
+ _critSect.Leave();
+ _ptrAudioBuffer->DeliverRecordedData();
+ _critSect.Enter();
+ }
+ }
+
+ _critSect.Leave();
+ SleepMs(10 - (_clock->CurrentNtpInMilliseconds() - currentTime));
+ return true;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.h b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.h
new file mode 100644
index 00000000000..6f417eb2e0b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device.h
@@ -0,0 +1,202 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_H
+#define WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_H
+
+#include <stdio.h>
+
+#include <string>
+
+#include "webrtc/modules/audio_device/audio_device_generic.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/file_wrapper.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+
+namespace webrtc {
+class EventWrapper;
+class ThreadWrapper;
+
+// This is a fake audio device which plays audio from a file as its microphone
+// and plays out into a file.
+class FileAudioDevice : public AudioDeviceGeneric {
+ public:
+ // Constructs a file audio device with |id|. It will read audio from
+ // |inputFilename| and record output audio to |outputFilename|.
+ //
+ // The input file should be a readable 48k stereo raw file, and the output
+ // file should point to a writable location. The output format will also be
+ // 48k stereo raw audio.
+ FileAudioDevice(const int32_t id,
+ const char* inputFilename,
+ const char* outputFilename);
+ virtual ~FileAudioDevice();
+
+ // Retrieve the currently utilized audio layer
+ virtual int32_t ActiveAudioLayer(
+ AudioDeviceModule::AudioLayer& audioLayer) const OVERRIDE;
+
+ // Main initializaton and termination
+ virtual int32_t Init() OVERRIDE;
+ virtual int32_t Terminate() OVERRIDE;
+ virtual bool Initialized() const OVERRIDE;
+
+ // Device enumeration
+ virtual int16_t PlayoutDevices() OVERRIDE;
+ virtual int16_t RecordingDevices() OVERRIDE;
+ virtual int32_t PlayoutDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) OVERRIDE;
+ virtual int32_t RecordingDeviceName(uint16_t index,
+ char name[kAdmMaxDeviceNameSize],
+ char guid[kAdmMaxGuidSize]) OVERRIDE;
+
+ // Device selection
+ virtual int32_t SetPlayoutDevice(uint16_t index) OVERRIDE;
+ virtual int32_t SetPlayoutDevice(
+ AudioDeviceModule::WindowsDeviceType device) OVERRIDE;
+ virtual int32_t SetRecordingDevice(uint16_t index) OVERRIDE;
+ virtual int32_t SetRecordingDevice(
+ AudioDeviceModule::WindowsDeviceType device) OVERRIDE;
+
+ // Audio transport initialization
+ virtual int32_t PlayoutIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t InitPlayout() OVERRIDE;
+ virtual bool PlayoutIsInitialized() const OVERRIDE;
+ virtual int32_t RecordingIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t InitRecording() OVERRIDE;
+ virtual bool RecordingIsInitialized() const OVERRIDE;
+
+ // Audio transport control
+ virtual int32_t StartPlayout() OVERRIDE;
+ virtual int32_t StopPlayout() OVERRIDE;
+ virtual bool Playing() const OVERRIDE;
+ virtual int32_t StartRecording() OVERRIDE;
+ virtual int32_t StopRecording() OVERRIDE;
+ virtual bool Recording() const OVERRIDE;
+
+ // Microphone Automatic Gain Control (AGC)
+ virtual int32_t SetAGC(bool enable) OVERRIDE;
+ virtual bool AGC() const OVERRIDE;
+
+ // Volume control based on the Windows Wave API (Windows only)
+ virtual int32_t SetWaveOutVolume(uint16_t volumeLeft,
+ uint16_t volumeRight) OVERRIDE;
+ virtual int32_t WaveOutVolume(uint16_t& volumeLeft,
+ uint16_t& volumeRight) const OVERRIDE;
+
+ // Audio mixer initialization
+ virtual int32_t InitSpeaker() OVERRIDE;
+ virtual bool SpeakerIsInitialized() const OVERRIDE;
+ virtual int32_t InitMicrophone() OVERRIDE;
+ virtual bool MicrophoneIsInitialized() const OVERRIDE;
+
+ // Speaker volume controls
+ virtual int32_t SpeakerVolumeIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetSpeakerVolume(uint32_t volume) OVERRIDE;
+ virtual int32_t SpeakerVolume(uint32_t& volume) const OVERRIDE;
+ virtual int32_t MaxSpeakerVolume(uint32_t& maxVolume) const OVERRIDE;
+ virtual int32_t MinSpeakerVolume(uint32_t& minVolume) const OVERRIDE;
+ virtual int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const OVERRIDE;
+
+ // Microphone volume controls
+ virtual int32_t MicrophoneVolumeIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetMicrophoneVolume(uint32_t volume) OVERRIDE;
+ virtual int32_t MicrophoneVolume(uint32_t& volume) const OVERRIDE;
+ virtual int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const OVERRIDE;
+ virtual int32_t MinMicrophoneVolume(uint32_t& minVolume) const OVERRIDE;
+ virtual int32_t MicrophoneVolumeStepSize(uint16_t& stepSize) const OVERRIDE;
+
+ // Speaker mute control
+ virtual int32_t SpeakerMuteIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetSpeakerMute(bool enable) OVERRIDE;
+ virtual int32_t SpeakerMute(bool& enabled) const OVERRIDE;
+
+ // Microphone mute control
+ virtual int32_t MicrophoneMuteIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetMicrophoneMute(bool enable) OVERRIDE;
+ virtual int32_t MicrophoneMute(bool& enabled) const OVERRIDE;
+
+ // Microphone boost control
+ virtual int32_t MicrophoneBoostIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetMicrophoneBoost(bool enable) OVERRIDE;
+ virtual int32_t MicrophoneBoost(bool& enabled) const OVERRIDE;
+
+ // Stereo support
+ virtual int32_t StereoPlayoutIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetStereoPlayout(bool enable) OVERRIDE;
+ virtual int32_t StereoPlayout(bool& enabled) const OVERRIDE;
+ virtual int32_t StereoRecordingIsAvailable(bool& available) OVERRIDE;
+ virtual int32_t SetStereoRecording(bool enable) OVERRIDE;
+ virtual int32_t StereoRecording(bool& enabled) const OVERRIDE;
+
+ // Delay information and control
+ virtual int32_t SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
+ uint16_t sizeMS) OVERRIDE;
+ virtual int32_t PlayoutBuffer(AudioDeviceModule::BufferType& type,
+ uint16_t& sizeMS) const OVERRIDE;
+ virtual int32_t PlayoutDelay(uint16_t& delayMS) const OVERRIDE;
+ virtual int32_t RecordingDelay(uint16_t& delayMS) const OVERRIDE;
+
+ // CPU load
+ virtual int32_t CPULoad(uint16_t& load) const OVERRIDE;
+
+ virtual bool PlayoutWarning() const OVERRIDE;
+ virtual bool PlayoutError() const OVERRIDE;
+ virtual bool RecordingWarning() const OVERRIDE;
+ virtual bool RecordingError() const OVERRIDE;
+ virtual void ClearPlayoutWarning() OVERRIDE;
+ virtual void ClearPlayoutError() OVERRIDE;
+ virtual void ClearRecordingWarning() OVERRIDE;
+ virtual void ClearRecordingError() OVERRIDE;
+
+ virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) OVERRIDE;
+
+ private:
+ static bool RecThreadFunc(void*);
+ static bool PlayThreadFunc(void*);
+ bool RecThreadProcess();
+ bool PlayThreadProcess();
+
+ int32_t _playout_index;
+ int32_t _record_index;
+ AudioDeviceModule::BufferType _playBufType;
+ AudioDeviceBuffer* _ptrAudioBuffer;
+ int8_t* _recordingBuffer; // In bytes.
+ int8_t* _playoutBuffer; // In bytes.
+ uint32_t _recordingFramesLeft;
+ uint32_t _playoutFramesLeft;
+ CriticalSectionWrapper& _critSect;
+
+ uint32_t _recordingBufferSizeIn10MS;
+ uint32_t _recordingFramesIn10MS;
+ uint32_t _playoutFramesIn10MS;
+
+ ThreadWrapper* _ptrThreadRec;
+ ThreadWrapper* _ptrThreadPlay;
+ uint32_t _recThreadID;
+ uint32_t _playThreadID;
+
+ bool _playing;
+ bool _recording;
+ uint64_t _lastCallPlayoutMillis;
+ uint64_t _lastCallRecordMillis;
+
+ FileWrapper& _outputFile;
+ FileWrapper& _inputFile;
+ std::string _outputFilename;
+ std::string _inputFilename;
+
+ Clock* _clock;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_H
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc
new file mode 100644
index 00000000000..db35bf111b1
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.cc
@@ -0,0 +1,43 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_device/dummy/file_audio_device_factory.h"
+
+#include <cstring>
+
+#include "webrtc/modules/audio_device/dummy/file_audio_device.h"
+
+namespace webrtc {
+
+char FileAudioDeviceFactory::_inputAudioFilename[MAX_FILENAME_LEN] = "";
+char FileAudioDeviceFactory::_outputAudioFilename[MAX_FILENAME_LEN] = "";
+
+FileAudioDevice* FileAudioDeviceFactory::CreateFileAudioDevice(
+ const int32_t id) {
+ // Bail out here if the files aren't set.
+ if (strlen(_inputAudioFilename) == 0 || strlen(_outputAudioFilename) == 0) {
+ printf("Was compiled with WEBRTC_DUMMY_AUDIO_PLAY_STATIC_FILE "
+ "but did not set input/output files to use. Bailing out.\n");
+ exit(1);
+ }
+ return new FileAudioDevice(id, _inputAudioFilename, _outputAudioFilename);
+}
+
+void FileAudioDeviceFactory::SetFilenamesToUse(
+ const char* inputAudioFilename, const char* outputAudioFilename) {
+ assert(strlen(inputAudioFilename) < MAX_FILENAME_LEN &&
+ strlen(outputAudioFilename) < MAX_FILENAME_LEN);
+
+ // Copy the strings since we don't know the lifetime of the input pointers.
+ strncpy(_inputAudioFilename, inputAudioFilename, MAX_FILENAME_LEN);
+ strncpy(_outputAudioFilename, outputAudioFilename, MAX_FILENAME_LEN);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.h b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.h
new file mode 100644
index 00000000000..9975d7b90e0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_device/dummy/file_audio_device_factory.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_FACTORY_H
+#define WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_FACTORY_H
+
+#include "webrtc/common_types.h"
+
+namespace webrtc {
+
+class FileAudioDevice;
+
+// This class is used by audio_device_impl.cc when WebRTC is compiled with
+// WEBRTC_DUMMY_FILE_DEVICES. The application must include this file and set the
+// filenames to use before the audio device module is initialized. This is
+// intended for test tools which use the audio device module.
+class FileAudioDeviceFactory {
+ public:
+ static FileAudioDevice* CreateFileAudioDevice(const int32_t id);
+
+ // The input file must be a readable 48k stereo raw file. The output
+ // file must be writable. The strings will be copied.
+ static void SetFilenamesToUse(const char* inputAudioFilename,
+ const char* outputAudioFilename);
+
+ private:
+ static const uint32_t MAX_FILENAME_LEN = 256;
+ static char _inputAudioFilename[MAX_FILENAME_LEN];
+ static char _outputAudioFilename[MAX_FILENAME_LEN];
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_AUDIO_DEVICE_FILE_AUDIO_DEVICE_FACTORY_H
diff --git a/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h b/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
index eb403aa9b98..ec40274de49 100644
--- a/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
+++ b/chromium/third_party/webrtc/modules/audio_device/include/audio_device.h
@@ -107,10 +107,8 @@ class AudioDeviceModule : public RefCountedModule {
uint16_t* volumeRight) const = 0;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool* available) = 0;
virtual int32_t InitSpeaker() = 0;
virtual bool SpeakerIsInitialized() const = 0;
- virtual int32_t MicrophoneIsAvailable(bool* available) = 0;
virtual int32_t InitMicrophone() = 0;
virtual bool MicrophoneIsInitialized() const = 0;
diff --git a/chromium/third_party/webrtc/modules/audio_device/include/audio_device_defines.h b/chromium/third_party/webrtc/modules/audio_device/include/audio_device_defines.h
index c37c4b13955..56a584ef9ea 100644
--- a/chromium/third_party/webrtc/modules/audio_device/include/audio_device_defines.h
+++ b/chromium/third_party/webrtc/modules/audio_device/include/audio_device_defines.h
@@ -63,14 +63,16 @@ public:
const int32_t clockDrift,
const uint32_t currentMicLevel,
const bool keyPressed,
- uint32_t& newMicLevel) = 0;
+ uint32_t& newMicLevel) = 0;
virtual int32_t NeedMorePlayData(const uint32_t nSamples,
const uint8_t nBytesPerSample,
const uint8_t nChannels,
const uint32_t samplesPerSec,
void* audioSamples,
- uint32_t& nSamplesOut) = 0;
+ uint32_t& nSamplesOut,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) = 0;
// Method to pass captured data directly and unmixed to network channels.
// |channel_ids| contains a list of VoE channels which are the
@@ -85,8 +87,8 @@ public:
// will be ignored.
// The return value is the new microphone volume, in the range of |0, 255].
// When the volume does not need to be updated, it returns 0.
- // TODO(xians): Make the interface pure virtual after libjingle has its
- // implementation.
+ // TODO(xians): Remove this interface after Chrome and Libjingle switches
+ // to OnData().
virtual int OnDataAvailable(const int voe_channels[],
int number_of_voe_channels,
const int16_t* audio_data,
@@ -98,6 +100,37 @@ public:
bool key_pressed,
bool need_audio_processing) { return 0; }
+ // Method to pass the captured audio data to the specific VoE channel.
+ // |voe_channel| is the id of the VoE channel which is the sink to the
+ // capture data.
+ // TODO(xians): Remove this interface after Libjingle switches to
+ // PushCaptureData().
+ virtual void OnData(int voe_channel, const void* audio_data,
+ int bits_per_sample, int sample_rate,
+ int number_of_channels,
+ int number_of_frames) {}
+
+ // Method to push the captured audio data to the specific VoE channel.
+ // The data will not undergo audio processing.
+ // |voe_channel| is the id of the VoE channel which is the sink to the
+ // capture data.
+ // TODO(xians): Make the interface pure virtual after Libjingle
+ // has its implementation.
+ virtual void PushCaptureData(int voe_channel, const void* audio_data,
+ int bits_per_sample, int sample_rate,
+ int number_of_channels,
+ int number_of_frames) {}
+
+ // Method to pull mixed render audio data from all active VoE channels.
+ // The data will not be passed as reference for audio processing internally.
+ // TODO(xians): Support getting the unmixed render data from specific VoE
+ // channel.
+ virtual void PullRenderData(int bits_per_sample, int sample_rate,
+ int number_of_channels, int number_of_frames,
+ void* audio_data,
+ int64_t* elapsed_time_ms,
+ int64_t* ntp_time_ms) {}
+
protected:
virtual ~AudioTransport() {}
};
diff --git a/chromium/third_party/webrtc/modules/audio_device/include/fake_audio_device.h b/chromium/third_party/webrtc/modules/audio_device/include/fake_audio_device.h
index 0248317550d..5cdf54fe8cd 100644
--- a/chromium/third_party/webrtc/modules/audio_device/include/fake_audio_device.h
+++ b/chromium/third_party/webrtc/modules/audio_device/include/fake_audio_device.h
@@ -25,19 +25,11 @@ class FakeAudioDeviceModule : public AudioDeviceModule {
return 0;
}
virtual int32_t Init() { return 0; }
- virtual int32_t SpeakerIsAvailable(bool* available) {
- *available = true;
- return 0;
- }
virtual int32_t InitSpeaker() { return 0; }
virtual int32_t SetPlayoutDevice(uint16_t index) { return 0; }
virtual int32_t SetPlayoutDevice(WindowsDeviceType device) { return 0; }
virtual int32_t SetStereoPlayout(bool enable) { return 0; }
virtual int32_t StopPlayout() { return 0; }
- virtual int32_t MicrophoneIsAvailable(bool* available) {
- *available = true;
- return 0;
- }
virtual int32_t InitMicrophone() { return 0; }
virtual int32_t SetRecordingDevice(uint16_t index) { return 0; }
virtual int32_t SetRecordingDevice(WindowsDeviceType device) { return 0; }
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.cc b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.cc
index bad3915f14f..7a7189a2b7f 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.cc
@@ -175,15 +175,6 @@ bool AudioDeviceIPhone::Initialized() const {
return (_initialized);
}
-int32_t AudioDeviceIPhone::SpeakerIsAvailable(bool& available) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,
- "%s", __FUNCTION__);
-
- // speaker is always available in IOS
- available = true;
- return 0;
-}
-
int32_t AudioDeviceIPhone::InitSpeaker() {
WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,
"%s", __FUNCTION__);
@@ -214,30 +205,6 @@ int32_t AudioDeviceIPhone::InitSpeaker() {
return 0;
}
-int32_t AudioDeviceIPhone::MicrophoneIsAvailable(bool& available) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,
- "%s", __FUNCTION__);
-
- available = false;
-
- OSStatus result = -1;
- UInt32 channel = 0;
- UInt32 size = sizeof(channel);
- result = AudioSessionGetProperty(kAudioSessionProperty_AudioInputAvailable,
- &size, &channel);
- if (channel != 0) {
- // API is not supported on this platform, we return available = true
- WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
- _id, " API call not supported on this version");
- available = true;
- return 0;
- }
-
- available = (channel == 0) ? false : true;
-
- return 0;
-}
-
int32_t AudioDeviceIPhone::InitMicrophone() {
WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,
"%s", __FUNCTION__);
@@ -1332,7 +1299,7 @@ int32_t AudioDeviceIPhone::InitPlayOrRecord() {
// todo: Add 48 kHz (increase buffer sizes). Other fs?
if ((playoutDesc.mSampleRate > 44090.0)
&& (playoutDesc.mSampleRate < 44110.0)) {
- _adbSampFreq = 44000;
+ _adbSampFreq = 44100;
} else if ((playoutDesc.mSampleRate > 15990.0)
&& (playoutDesc.mSampleRate < 16010.0)) {
_adbSampFreq = 16000;
@@ -1716,7 +1683,10 @@ void AudioDeviceIPhone::UpdatePlayoutDelay() {
if (_playoutDelayMeasurementCounter >= 100) {
// Update HW and OS delay every second, unlikely to change
- _playoutDelay = 0;
+ // Since this is eventually rounded to integral ms, add 0.5ms
+ // here to get round-to-nearest-int behavior instead of
+ // truncation.
+ float totalDelaySeconds = 0.0005;
// HW output latency
Float32 f32(0);
@@ -1727,7 +1697,8 @@ void AudioDeviceIPhone::UpdatePlayoutDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error HW latency (result=%d)", result);
}
- _playoutDelay += static_cast<int>(f32 * 1000000);
+ assert(f32 >= 0);
+ totalDelaySeconds += f32;
// HW buffer duration
f32 = 0;
@@ -1737,7 +1708,8 @@ void AudioDeviceIPhone::UpdatePlayoutDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error HW buffer duration (result=%d)", result);
}
- _playoutDelay += static_cast<int>(f32 * 1000000);
+ assert(f32 >= 0);
+ totalDelaySeconds += f32;
// AU latency
Float64 f64(0);
@@ -1748,16 +1720,17 @@ void AudioDeviceIPhone::UpdatePlayoutDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error AU latency (result=%d)", result);
}
- _playoutDelay += static_cast<int>(f64 * 1000000);
+ assert(f64 >= 0);
+ totalDelaySeconds += f64;
// To ms
- _playoutDelay = (_playoutDelay - 500) / 1000;
+ _playoutDelay = static_cast<uint32_t>(totalDelaySeconds / 1000);
// Reset counter
_playoutDelayMeasurementCounter = 0;
}
- // todo: Add playout buffer? (Only used for 44.1 kHz)
+ // todo: Add playout buffer?
}
void AudioDeviceIPhone::UpdateRecordingDelay() {
@@ -1766,7 +1739,10 @@ void AudioDeviceIPhone::UpdateRecordingDelay() {
if (_recordingDelayMeasurementCounter >= 100) {
// Update HW and OS delay every second, unlikely to change
- _recordingDelayHWAndOS = 0;
+ // Since this is eventually rounded to integral ms, add 0.5ms
+ // here to get round-to-nearest-int behavior instead of
+ // truncation.
+ float totalDelaySeconds = 0.0005;
// HW input latency
Float32 f32(0);
@@ -1777,7 +1753,8 @@ void AudioDeviceIPhone::UpdateRecordingDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error HW latency (result=%d)", result);
}
- _recordingDelayHWAndOS += static_cast<int>(f32 * 1000000);
+ assert(f32 >= 0);
+ totalDelaySeconds += f32;
// HW buffer duration
f32 = 0;
@@ -1787,7 +1764,8 @@ void AudioDeviceIPhone::UpdateRecordingDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error HW buffer duration (result=%d)", result);
}
- _recordingDelayHWAndOS += static_cast<int>(f32 * 1000000);
+ assert(f32 >= 0);
+ totalDelaySeconds += f32;
// AU latency
Float64 f64(0);
@@ -1799,10 +1777,12 @@ void AudioDeviceIPhone::UpdateRecordingDelay() {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"error AU latency (result=%d)", result);
}
- _recordingDelayHWAndOS += static_cast<int>(f64 * 1000000);
+ assert(f64 >= 0);
+ totalDelaySeconds += f64;
// To ms
- _recordingDelayHWAndOS = (_recordingDelayHWAndOS - 500) / 1000;
+ _recordingDelayHWAndOS =
+ static_cast<uint32_t>(totalDelaySeconds / 1000);
// Reset counter
_recordingDelayMeasurementCounter = 0;
diff --git a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
index fdaf94d6a60..011b6acf481 100644
--- a/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
+++ b/chromium/third_party/webrtc/modules/audio_device/ios/audio_device_ios.h
@@ -90,10 +90,8 @@ public:
uint16_t& volumeRight) const;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available);
virtual int32_t InitSpeaker();
virtual bool SpeakerIsInitialized() const;
- virtual int32_t MicrophoneIsAvailable(bool& available);
virtual int32_t InitMicrophone();
virtual bool MicrophoneIsInitialized() const;
diff --git a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
index caa1efed8ff..67a845c845b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
@@ -180,7 +180,7 @@ int32_t AudioDeviceLinuxALSA::Init()
{
return 0;
}
-
+#if defined(USE_X11)
//Get X display handle for typing detection
_XDisplay = XOpenDisplay(NULL);
if (!_XDisplay)
@@ -188,7 +188,7 @@ int32_t AudioDeviceLinuxALSA::Init()
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
" failed to open X display, typing detection will not work");
}
-
+#endif
_playWarning = 0;
_playError = 0;
_recWarning = 0;
@@ -254,13 +254,13 @@ int32_t AudioDeviceLinuxALSA::Terminate()
_critSect.Enter();
}
-
+#if defined(USE_X11)
if (_XDisplay)
{
XCloseDisplay(_XDisplay);
_XDisplay = NULL;
}
-
+#endif
_initialized = false;
_outputDeviceIsSpecified = false;
_inputDeviceIsSpecified = false;
@@ -273,34 +273,6 @@ bool AudioDeviceLinuxALSA::Initialized() const
return (_initialized);
}
-int32_t AudioDeviceLinuxALSA::SpeakerIsAvailable(bool& available)
-{
-
- bool wasInitialized = _mixerManager.SpeakerIsInitialized();
-
- // Make an attempt to open up the
- // output mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitSpeaker() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitSpeaker was successful, we know that a valid speaker
- // exists
- available = true;
-
- // Close the initialized output mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseSpeaker();
- }
-
- return 0;
-}
-
int32_t AudioDeviceLinuxALSA::InitSpeaker()
{
@@ -316,34 +288,6 @@ int32_t AudioDeviceLinuxALSA::InitSpeaker()
return _mixerManager.OpenSpeaker(devName);
}
-int32_t AudioDeviceLinuxALSA::MicrophoneIsAvailable(bool& available)
-{
-
- bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
-
- // Make an attempt to open up the
- // input mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitMicrophone() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitMicrophone was successful, we know that a valid
- // microphone exists
- available = true;
-
- // Close the initialized input mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseMicrophone();
- }
-
- return 0;
-}
-
int32_t AudioDeviceLinuxALSA::InitMicrophone()
{
@@ -2342,7 +2286,7 @@ bool AudioDeviceLinuxALSA::RecThreadProcess()
bool AudioDeviceLinuxALSA::KeyPressed() const{
-
+#if defined(USE_X11)
char szKey[32];
unsigned int i = 0;
char state = 0;
@@ -2360,5 +2304,8 @@ bool AudioDeviceLinuxALSA::KeyPressed() const{
// Save old state
memcpy((char*)_oldKeyState, (char*)szKey, sizeof(_oldKeyState));
return (state != 0);
+#else
+ return false;
+#endif
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
index 35abc152fc8..1d75c8e94a0 100644
--- a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
+++ b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
@@ -15,8 +15,9 @@
#include "webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-
+#if defined(USE_X11)
#include <X11/Xlib.h>
+#endif
#include <alsa/asoundlib.h>
#include <sys/ioctl.h>
#include <sys/soundcard.h>
@@ -89,10 +90,8 @@ public:
uint16_t& volumeRight) const OVERRIDE;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitSpeaker() OVERRIDE;
virtual bool SpeakerIsInitialized() const OVERRIDE;
- virtual int32_t MicrophoneIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitMicrophone() OVERRIDE;
virtual bool MicrophoneIsInitialized() const OVERRIDE;
@@ -135,7 +134,7 @@ public:
virtual int32_t StereoRecordingIsAvailable(bool& available) OVERRIDE;
virtual int32_t SetStereoRecording(bool enable) OVERRIDE;
virtual int32_t StereoRecording(bool& enabled) const OVERRIDE;
-
+
// Delay information and control
virtual int32_t SetPlayoutBuffer(
const AudioDeviceModule::BufferType type,
@@ -174,8 +173,8 @@ private:
bool KeyPressed() const;
private:
- void Lock() { _critSect.Enter(); };
- void UnLock() { _critSect.Leave(); };
+ void Lock() EXCLUSIVE_LOCK_FUNCTION(_critSect) { _critSect.Enter(); };
+ void UnLock() UNLOCK_FUNCTION(_critSect) { _critSect.Leave(); };
private:
inline int32_t InputSanityCheckAfterUnlockedPeriod() const;
inline int32_t OutputSanityCheckAfterUnlockedPeriod() const;
@@ -188,7 +187,7 @@ private:
private:
AudioDeviceBuffer* _ptrAudioBuffer;
-
+
CriticalSectionWrapper& _critSect;
ThreadWrapper* _ptrThreadRec;
@@ -250,7 +249,9 @@ private:
uint16_t _playBufDelayFixed; // fixed playback delay
char _oldKeyState[32];
+#if defined(USE_X11)
Display* _XDisplay;
+#endif
};
}
diff --git a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc
index e095eed0d5b..00d4afed892 100644
--- a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc
@@ -341,34 +341,6 @@ bool AudioDeviceLinuxPulse::Initialized() const
return (_initialized);
}
-int32_t AudioDeviceLinuxPulse::SpeakerIsAvailable(bool& available)
-{
-
- bool wasInitialized = _mixerManager.SpeakerIsInitialized();
-
- // Make an attempt to open up the
- // output mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitSpeaker() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitSpeaker was successful, we know that a valid speaker exists
- //
- available = true;
-
- // Close the initialized output mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseSpeaker();
- }
-
- return 0;
-}
-
int32_t AudioDeviceLinuxPulse::InitSpeaker()
{
@@ -414,34 +386,6 @@ int32_t AudioDeviceLinuxPulse::InitSpeaker()
return 0;
}
-int32_t AudioDeviceLinuxPulse::MicrophoneIsAvailable(bool& available)
-{
-
- bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
-
- // Make an attempt to open up the
- // input mixer corresponding to the currently selected output device.
- //
- if (!wasInitialized && InitMicrophone() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitMicrophone was successful, we know that a valid microphone
- // exists
- available = true;
-
- // Close the initialized input mixer
- //
- if (!wasInitialized)
- {
- _mixerManager.CloseMicrophone();
- }
-
- return 0;
-}
-
int32_t AudioDeviceLinuxPulse::InitMicrophone()
{
@@ -2613,7 +2557,7 @@ int32_t AudioDeviceLinuxPulse::ReadRecordedData(const void* bufferData,
int32_t AudioDeviceLinuxPulse::ProcessRecordedData(
int8_t *bufferData,
uint32_t bufferSizeInSamples,
- uint32_t recDelay)
+ uint32_t recDelay) EXCLUSIVE_LOCKS_REQUIRED(_critSect)
{
uint32_t currentMicLevel(0);
uint32_t newMicLevel(0);
diff --git a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h
index 43228a1babf..cde7e464390 100644
--- a/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h
+++ b/chromium/third_party/webrtc/modules/audio_device/linux/audio_device_pulse_linux.h
@@ -151,10 +151,8 @@ public:
uint16_t& volumeRight) const OVERRIDE;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitSpeaker() OVERRIDE;
virtual bool SpeakerIsInitialized() const OVERRIDE;
- virtual int32_t MicrophoneIsAvailable(bool& available) OVERRIDE;
virtual int32_t InitMicrophone() OVERRIDE;
virtual bool MicrophoneIsInitialized() const OVERRIDE;
@@ -224,16 +222,12 @@ public:
virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) OVERRIDE;
private:
- void Lock()
- {
+ void Lock() EXCLUSIVE_LOCK_FUNCTION(_critSect) {
_critSect.Enter();
}
- ;
- void UnLock()
- {
+ void UnLock() UNLOCK_FUNCTION(_critSect) {
_critSect.Leave();
}
- ;
void WaitForOperationCompletion(pa_operation* paOperation) const;
void WaitForSuccess(pa_operation* paOperation) const;
diff --git a/chromium/third_party/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h b/chromium/third_party/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h
index b5186fa7bf2..052390a658e 100644
--- a/chromium/third_party/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h
+++ b/chromium/third_party/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h
@@ -32,7 +32,7 @@
#include <stddef.h> // for NULL
#include <string.h>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/trace.h"
// This file provides macros for creating "symbol table" classes to simplify the
diff --git a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc
index b07c94dd11c..2c875796f18 100644
--- a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.cc
@@ -571,7 +571,6 @@ int32_t AudioDeviceMac::MicrophoneIsAvailable(bool& available)
return 0;
}
-
int32_t AudioDeviceMac::InitMicrophone()
{
diff --git a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h
index a2662239286..fae4041133b 100644
--- a/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h
+++ b/chromium/third_party/webrtc/modules/audio_device/mac/audio_device_mac.h
@@ -108,10 +108,8 @@ public:
uint16_t& volumeRight) const;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available);
virtual int32_t InitSpeaker();
virtual bool SpeakerIsInitialized() const;
- virtual int32_t MicrophoneIsAvailable(bool& available);
virtual int32_t InitMicrophone();
virtual bool MicrophoneIsInitialized() const;
@@ -167,7 +165,6 @@ public:
// CPU load
virtual int32_t CPULoad(uint16_t& load) const;
-public:
virtual bool PlayoutWarning() const;
virtual bool PlayoutError() const;
virtual bool RecordingWarning() const;
@@ -177,10 +174,12 @@ public:
virtual void ClearRecordingWarning();
virtual void ClearRecordingError();
-public:
virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
private:
+ virtual int32_t MicrophoneIsAvailable(bool& available);
+ virtual int32_t SpeakerIsAvailable(bool& available);
+
void Lock()
{
_critSect.Enter();
@@ -236,7 +235,6 @@ private:
int32_t
HandleProcessorOverload(AudioObjectPropertyAddress propertyAddress);
-private:
static OSStatus deviceIOProc(AudioDeviceID device,
const AudioTimeStamp *now,
const AudioBufferList *inputData,
@@ -284,10 +282,8 @@ private:
bool CaptureWorkerThread();
bool RenderWorkerThread();
-private:
bool KeyPressed();
-private:
AudioDeviceBuffer* _ptrAudioBuffer;
CriticalSectionWrapper& _critSect;
@@ -325,7 +321,6 @@ private:
AudioDeviceModule::BufferType _playBufType;
-private:
bool _initialized;
bool _isShutDown;
bool _recording;
@@ -361,7 +356,6 @@ private:
int32_t _renderDelayOffsetSamples;
-private:
uint16_t _playBufDelayFixed; // fixed playback delay
uint16_t _playWarning;
@@ -378,7 +372,6 @@ private:
int _captureBufSizeSamples;
int _renderBufSizeSamples;
-private:
// Typing detection
// 0x5c is key "9", after that comes function keys.
bool prev_key_state_[0x5d];
diff --git a/chromium/third_party/webrtc/modules/audio_device/main/source/OWNERS b/chromium/third_party/webrtc/modules/audio_device/main/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_device/main/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.cc b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.cc
index a71b821189a..32b5e49af1f 100644
--- a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.cc
@@ -751,25 +751,6 @@ bool AudioDeviceWindowsCore::Initialized() const
}
// ----------------------------------------------------------------------------
-// SpeakerIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceWindowsCore::SpeakerIsAvailable(bool& available)
-{
-
- CriticalSectionScoped lock(&_critSect);
-
- if (_ptrDeviceOut == NULL)
- {
- return -1;
- }
-
- available = true;
-
- return 0;
-}
-
-// ----------------------------------------------------------------------------
// InitSpeaker
// ----------------------------------------------------------------------------
@@ -852,25 +833,6 @@ int32_t AudioDeviceWindowsCore::InitSpeaker()
}
// ----------------------------------------------------------------------------
-// MicrophoneIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceWindowsCore::MicrophoneIsAvailable(bool& available)
-{
-
- CriticalSectionScoped lock(&_critSect);
-
- if (_ptrDeviceIn == NULL)
- {
- return -1;
- }
-
- available = true;
-
- return 0;
-}
-
-// ----------------------------------------------------------------------------
// InitMicrophone
// ----------------------------------------------------------------------------
diff --git a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.h b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.h
index 7a9a5245ca9..4d30928c5ed 100644
--- a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.h
+++ b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_core_win.h
@@ -138,10 +138,8 @@ public:
virtual int32_t WaveOutVolume(uint16_t& volumeLeft, uint16_t& volumeRight) const;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available);
virtual int32_t InitSpeaker();
virtual bool SpeakerIsInitialized() const;
- virtual int32_t MicrophoneIsAvailable(bool& available);
virtual int32_t InitMicrophone();
virtual bool MicrophoneIsInitialized() const;
diff --git a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
index c05d0a29a7a..e2e515b1004 100644
--- a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
+++ b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.cc
@@ -490,33 +490,6 @@ bool AudioDeviceWindowsWave::Initialized() const
}
// ----------------------------------------------------------------------------
-// SpeakerIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceWindowsWave::SpeakerIsAvailable(bool& available)
-{
-
- // Enumerate all avaliable speakers and make an attempt to open up the
- // output mixer corresponding to the currently selected output device.
- //
- if (InitSpeaker() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitSpeaker was successful, we know that a valid speaker exists
- //
- available = true;
-
- // Close the initialized output mixer
- //
- _mixerManager.CloseSpeaker();
-
- return 0;
-}
-
-// ----------------------------------------------------------------------------
// InitSpeaker
// ----------------------------------------------------------------------------
@@ -555,33 +528,6 @@ int32_t AudioDeviceWindowsWave::InitSpeaker()
}
// ----------------------------------------------------------------------------
-// MicrophoneIsAvailable
-// ----------------------------------------------------------------------------
-
-int32_t AudioDeviceWindowsWave::MicrophoneIsAvailable(bool& available)
-{
-
- // Enumerate all avaliable microphones and make an attempt to open up the
- // input mixer corresponding to the currently selected output device.
- //
- if (InitMicrophone() == -1)
- {
- available = false;
- return 0;
- }
-
- // Given that InitMicrophone was successful, we know that a valid microphone exists
- //
- available = true;
-
- // Close the initialized input mixer
- //
- _mixerManager.CloseMicrophone();
-
- return 0;
-}
-
-// ----------------------------------------------------------------------------
// InitMicrophone
// ----------------------------------------------------------------------------
diff --git a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.h b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.h
index 480bbcbc778..1767b90a858 100644
--- a/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.h
+++ b/chromium/third_party/webrtc/modules/audio_device/win/audio_device_wave_win.h
@@ -94,10 +94,8 @@ public:
virtual int32_t WaveOutVolume(uint16_t& volumeLeft, uint16_t& volumeRight) const;
// Audio mixer initialization
- virtual int32_t SpeakerIsAvailable(bool& available);
virtual int32_t InitSpeaker();
virtual bool SpeakerIsInitialized() const;
- virtual int32_t MicrophoneIsAvailable(bool& available);
virtual int32_t InitMicrophone();
virtual bool MicrophoneIsInitialized() const;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/OWNERS b/chromium/third_party/webrtc/modules/audio_processing/OWNERS
index 5a2563444b6..7c1f7881c2c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/OWNERS
+++ b/chromium/third_party/webrtc/modules/audio_processing/OWNERS
@@ -1,2 +1,8 @@
+aluebs@webrtc.org
andrew@webrtc.org
bjornv@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/Android.mk b/chromium/third_party/webrtc/modules/audio_processing/aec/Android.mk
index 3ad52b96625..181e87d9a76 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/Android.mk
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/Android.mk
@@ -47,3 +47,12 @@ ifndef NDK_ROOT
include external/stlport/libstlport.mk
endif
include $(BUILD_STATIC_LIBRARY)
+
+#########################
+# Build the neon library.
+ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
+
+LOCAL_SRC_FILES += \
+ aec_core_neon.c
+
+endif # ifeq ($(WEBRTC_BUILD_NEON_LIBS),true)
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.c
index bfa087c23de..207c6dc3bfc 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.c
@@ -67,7 +67,7 @@ static const float sqrtHanning[65] = {
// Matlab code to produce table:
// weightCurve = [0 ; 0.3 * sqrt(linspace(0,1,64))' + 0.1];
// fprintf(1, '\t%.4f, %.4f, %.4f, %.4f, %.4f, %.4f,\n', weightCurve);
-const float WebRtcAec_weightCurve[65] = {
+ALIGN16_BEG const float ALIGN16_END WebRtcAec_weightCurve[65] = {
0.0000f, 0.1000f, 0.1378f, 0.1535f, 0.1655f, 0.1756f, 0.1845f, 0.1926f,
0.2000f, 0.2069f, 0.2134f, 0.2195f, 0.2254f, 0.2309f, 0.2363f, 0.2414f,
0.2464f, 0.2512f, 0.2558f, 0.2604f, 0.2648f, 0.2690f, 0.2732f, 0.2773f,
@@ -81,7 +81,7 @@ const float WebRtcAec_weightCurve[65] = {
// Matlab code to produce table:
// overDriveCurve = [sqrt(linspace(0,1,65))' + 1];
// fprintf(1, '\t%.4f, %.4f, %.4f, %.4f, %.4f, %.4f,\n', overDriveCurve);
-const float WebRtcAec_overDriveCurve[65] = {
+ALIGN16_BEG const float ALIGN16_END WebRtcAec_overDriveCurve[65] = {
1.0000f, 1.1250f, 1.1768f, 1.2165f, 1.2500f, 1.2795f, 1.3062f, 1.3307f,
1.3536f, 1.3750f, 1.3953f, 1.4146f, 1.4330f, 1.4507f, 1.4677f, 1.4841f,
1.5000f, 1.5154f, 1.5303f, 1.5449f, 1.5590f, 1.5728f, 1.5863f, 1.5995f,
@@ -116,7 +116,7 @@ extern int webrtc_aec_instance_count;
// "Private" function prototypes.
static void ProcessBlock(AecCore* aec);
-static void NonLinearProcessing(AecCore* aec, short* output, short* outputH);
+static void NonLinearProcessing(AecCore* aec, float* output, float* outputH);
static void GetHighbandGain(const float* lambda, float* nlpGainHband);
@@ -160,28 +160,28 @@ int WebRtcAec_CreateAec(AecCore** aecInst) {
return -1;
}
- aec->nearFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
+ aec->nearFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
if (!aec->nearFrBuf) {
WebRtcAec_FreeAec(aec);
aec = NULL;
return -1;
}
- aec->outFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
+ aec->outFrBuf = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
if (!aec->outFrBuf) {
WebRtcAec_FreeAec(aec);
aec = NULL;
return -1;
}
- aec->nearFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
+ aec->nearFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
if (!aec->nearFrBufH) {
WebRtcAec_FreeAec(aec);
aec = NULL;
return -1;
}
- aec->outFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(int16_t));
+ aec->outFrBufH = WebRtc_CreateBuffer(FRAME_LEN + PART_LEN, sizeof(float));
if (!aec->outFrBufH) {
WebRtcAec_FreeAec(aec);
aec = NULL;
@@ -419,6 +419,7 @@ WebRtcAec_FilterFar_t WebRtcAec_FilterFar;
WebRtcAec_ScaleErrorSignal_t WebRtcAec_ScaleErrorSignal;
WebRtcAec_FilterAdaptation_t WebRtcAec_FilterAdaptation;
WebRtcAec_OverdriveAndSuppress_t WebRtcAec_OverdriveAndSuppress;
+WebRtcAec_ComfortNoise_t WebRtcAec_ComfortNoise;
int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
int i;
@@ -472,9 +473,21 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
aec->delay_logging_enabled = 0;
memset(aec->delay_histogram, 0, sizeof(aec->delay_histogram));
+ aec->reported_delay_enabled = 1;
aec->extended_filter_enabled = 0;
aec->num_partitions = kNormalNumPartitions;
+ // Update the delay estimator with filter length. We use half the
+ // |num_partitions| to take the echo path into account. In practice we say
+ // that the echo has a duration of maximum half |num_partitions|, which is not
+ // true, but serves as a crude measure.
+ WebRtc_set_allowed_offset(aec->delay_estimator, aec->num_partitions / 2);
+ // TODO(bjornv): I currently hard coded the enable. Once we've established
+ // that AECM has no performance regression, robust_validation will be enabled
+ // all the time and the APIs to turn it on/off will be removed. Hence, remove
+ // this line then.
+ WebRtc_enable_robust_validation(aec->delay_estimator, 1);
+
// Default target suppression mode.
aec->nlp_mode = 1;
@@ -557,6 +570,7 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
WebRtcAec_ScaleErrorSignal = ScaleErrorSignal;
WebRtcAec_FilterAdaptation = FilterAdaptation;
WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppress;
+ WebRtcAec_ComfortNoise = ComfortNoise;
#if defined(WEBRTC_ARCH_X86_FAMILY)
if (WebRtc_GetCPUInfo(kSSE2)) {
@@ -564,6 +578,14 @@ int WebRtcAec_InitAec(AecCore* aec, int sampFreq) {
}
#endif
+#if defined(MIPS_FPU_LE)
+ WebRtcAec_InitAec_mips();
+#endif
+
+#if defined(WEBRTC_DETECT_ARM_NEON) || defined(WEBRTC_ARCH_ARM_NEON)
+ WebRtcAec_InitAec_neon();
+#endif
+
aec_rdft_init();
return 0;
@@ -599,11 +621,11 @@ int WebRtcAec_MoveFarReadPtr(AecCore* aec, int elements) {
}
void WebRtcAec_ProcessFrame(AecCore* aec,
- const short* nearend,
- const short* nearendH,
+ const float* nearend,
+ const float* nearendH,
int knownDelay,
- int16_t* out,
- int16_t* outH) {
+ float* out,
+ float* outH) {
int out_elements = 0;
// For each frame the process is as follows:
@@ -724,7 +746,7 @@ int WebRtcAec_GetDelayMetricsCore(AecCore* self, int* median, int* std) {
// Calculate the L1 norm, with median value as central moment.
for (i = 0; i < kHistorySizeBlocks; i++) {
- l1_norm += (float)(fabs(i - my_median) * self->delay_histogram[i]);
+ l1_norm += (float)abs(i - my_median) * self->delay_histogram[i];
}
*std = (int)(l1_norm / (float)num_delay_values + 0.5f) * kMsPerBlock;
@@ -768,9 +790,19 @@ void WebRtcAec_SetConfigCore(AecCore* self,
}
}
+void WebRtcAec_enable_reported_delay(AecCore* self, int enable) {
+ self->reported_delay_enabled = enable;
+}
+
+int WebRtcAec_reported_delay_enabled(AecCore* self) {
+ return self->reported_delay_enabled;
+}
+
void WebRtcAec_enable_delay_correction(AecCore* self, int enable) {
self->extended_filter_enabled = enable;
self->num_partitions = enable ? kExtendedNumPartitions : kNormalNumPartitions;
+ // Update the delay estimator with filter length. See InitAEC() for details.
+ WebRtc_set_allowed_offset(self->delay_estimator, self->num_partitions / 2);
}
int WebRtcAec_delay_correction_enabled(AecCore* self) {
@@ -786,7 +818,7 @@ void WebRtcAec_SetSystemDelay(AecCore* self, int delay) {
static void ProcessBlock(AecCore* aec) {
int i;
- float d[PART_LEN], y[PART_LEN], e[PART_LEN], dH[PART_LEN];
+ float y[PART_LEN], e[PART_LEN];
float scale;
float fft[PART_LEN2];
@@ -805,30 +837,22 @@ static void ProcessBlock(AecCore* aec) {
const float ramp = 1.0002f;
const float gInitNoise[2] = {0.999f, 0.001f};
- int16_t nearend[PART_LEN];
- int16_t* nearend_ptr = NULL;
- int16_t output[PART_LEN];
- int16_t outputH[PART_LEN];
+ float nearend[PART_LEN];
+ float* nearend_ptr = NULL;
+ float output[PART_LEN];
+ float outputH[PART_LEN];
float* xf_ptr = NULL;
- memset(dH, 0, sizeof(dH));
+ // Concatenate old and new nearend blocks.
if (aec->sampFreq == 32000) {
- // Get the upper band first so we can reuse |nearend|.
WebRtc_ReadBuffer(aec->nearFrBufH, (void**)&nearend_ptr, nearend, PART_LEN);
- for (i = 0; i < PART_LEN; i++) {
- dH[i] = (float)(nearend_ptr[i]);
- }
- memcpy(aec->dBufH + PART_LEN, dH, sizeof(float) * PART_LEN);
+ memcpy(aec->dBufH + PART_LEN, nearend_ptr, sizeof(nearend));
}
WebRtc_ReadBuffer(aec->nearFrBuf, (void**)&nearend_ptr, nearend, PART_LEN);
+ memcpy(aec->dBuf + PART_LEN, nearend_ptr, sizeof(nearend));
// ---------- Ooura fft ----------
- // Concatenate old and new nearend blocks.
- for (i = 0; i < PART_LEN; i++) {
- d[i] = (float)(nearend_ptr[i]);
- }
- memcpy(aec->dBuf + PART_LEN, d, sizeof(float) * PART_LEN);
#ifdef WEBRTC_AEC_DEBUG_DUMP
{
@@ -940,7 +964,7 @@ static void ProcessBlock(AecCore* aec) {
}
for (i = 0; i < PART_LEN; i++) {
- e[i] = d[i] - y[i];
+ e[i] = nearend_ptr[i] - y[i];
}
// Error fft
@@ -999,7 +1023,7 @@ static void ProcessBlock(AecCore* aec) {
#endif
}
-static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) {
+static void NonLinearProcessing(AecCore* aec, float* output, float* outputH) {
float efw[2][PART_LEN1], dfw[2][PART_LEN1], xfw[2][PART_LEN1];
complex_t comfortNoiseHband[PART_LEN1];
float fft[PART_LEN2];
@@ -1266,7 +1290,7 @@ static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) {
WebRtcAec_OverdriveAndSuppress(aec, hNl, hNlFb, efw);
// Add comfort noise.
- ComfortNoise(aec, efw, comfortNoiseHband, aec->noisePow, hNl);
+ WebRtcAec_ComfortNoise(aec, efw, comfortNoiseHband, aec->noisePow, hNl);
// TODO(bjornv): Investigate how to take the windowing below into account if
// needed.
@@ -1293,12 +1317,12 @@ static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) {
fft[i] *= scale; // fft scaling
fft[i] = fft[i] * sqrtHanning[i] + aec->outBuf[i];
- // Saturation protection
- output[i] = (short)WEBRTC_SPL_SAT(
- WEBRTC_SPL_WORD16_MAX, fft[i], WEBRTC_SPL_WORD16_MIN);
-
fft[PART_LEN + i] *= scale; // fft scaling
aec->outBuf[i] = fft[PART_LEN + i] * sqrtHanning[PART_LEN - i];
+
+ // Saturate output to keep it in the allowed range.
+ output[i] = WEBRTC_SPL_SAT(
+ WEBRTC_SPL_WORD16_MAX, fft[i], WEBRTC_SPL_WORD16_MIN);
}
// For H band
@@ -1323,8 +1347,8 @@ static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) {
// compute gain factor
for (i = 0; i < PART_LEN; i++) {
- dtmp = (float)aec->dBufH[i];
- dtmp = (float)dtmp * nlpGainHband; // for variable gain
+ dtmp = aec->dBufH[i];
+ dtmp = dtmp * nlpGainHband; // for variable gain
// add some comfort noise where Hband is attenuated
if (flagHbandCn == 1) {
@@ -1332,8 +1356,8 @@ static void NonLinearProcessing(AecCore* aec, short* output, short* outputH) {
dtmp += cnScaleHband * fft[i];
}
- // Saturation protection
- outputH[i] = (short)WEBRTC_SPL_SAT(
+ // Saturate output to keep it in the allowed range.
+ outputH[i] = WEBRTC_SPL_SAT(
WEBRTC_SPL_WORD16_MAX, dtmp, WEBRTC_SPL_WORD16_MIN);
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
index d3c6d7e2b2e..93bfed46688 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core.h
@@ -22,17 +22,6 @@
#define PART_LEN1 (PART_LEN + 1) // Unique fft coefficients
#define PART_LEN2 (PART_LEN * 2) // Length of partition * 2
-// Delay estimator constants, used for logging.
-enum {
- kMaxDelayBlocks = 60
-};
-enum {
- kLookaheadBlocks = 15
-};
-enum {
- kHistorySizeBlocks = kMaxDelayBlocks + kLookaheadBlocks
-};
-
typedef float complex_t[2];
// For performance reasons, some arrays of complex numbers are replaced by twice
// as long arrays of float, all the real parts followed by all the imaginary
@@ -65,14 +54,20 @@ int WebRtcAec_CreateAec(AecCore** aec);
int WebRtcAec_FreeAec(AecCore* aec);
int WebRtcAec_InitAec(AecCore* aec, int sampFreq);
void WebRtcAec_InitAec_SSE2(void);
+#if defined(MIPS_FPU_LE)
+void WebRtcAec_InitAec_mips(void);
+#endif
+#if defined(WEBRTC_DETECT_ARM_NEON) || defined(WEBRTC_ARCH_ARM_NEON)
+void WebRtcAec_InitAec_neon(void);
+#endif
void WebRtcAec_BufferFarendPartition(AecCore* aec, const float* farend);
void WebRtcAec_ProcessFrame(AecCore* aec,
- const short* nearend,
- const short* nearendH,
+ const float* nearend,
+ const float* nearendH,
int knownDelay,
- int16_t* out,
- int16_t* outH);
+ float* out,
+ float* outH);
// A helper function to call WebRtc_MoveReadPtr() for all far-end buffers.
// Returns the number of elements moved, and adjusts |system_delay| by the
@@ -101,6 +96,12 @@ void WebRtcAec_SetConfigCore(AecCore* self,
int metrics_mode,
int delay_logging);
+// Non-zero enables, zero disables.
+void WebRtcAec_enable_reported_delay(AecCore* self, int enable);
+
+// Returns non-zero if reported delay is enabled and zero if disabled.
+int WebRtcAec_reported_delay_enabled(AecCore* self);
+
// We now interpret delay correction to mean an extended filter length feature.
// We reuse the delay correction infrastructure to avoid changes through to
// libjingle. See details along with |DelayCorrection| in
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
index 193369382ca..1c560f91c9c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_internal.h
@@ -26,6 +26,17 @@ enum {
};
static const int kNormalNumPartitions = 12;
+// Delay estimator constants, used for logging.
+enum {
+ kMaxDelayBlocks = 60
+};
+enum {
+ kLookaheadBlocks = 15
+};
+enum {
+ kHistorySizeBlocks = kMaxDelayBlocks + kLookaheadBlocks
+};
+
// Extended filter adaptation parameters.
// TODO(ajm): No narrowband tuning yet.
static const float kExtendedMu = 0.4f;
@@ -122,6 +133,7 @@ struct AecCore {
void* delay_estimator_farend;
void* delay_estimator;
+ int reported_delay_enabled; // 0 = disabled, otherwise enabled.
// 1 = extended filter mode enabled, 0 = disabled.
int extended_filter_enabled;
// Runtime selection of number of filter partitions.
@@ -151,4 +163,11 @@ typedef void (*WebRtcAec_OverdriveAndSuppress_t)(AecCore* aec,
float efw[2][PART_LEN1]);
extern WebRtcAec_OverdriveAndSuppress_t WebRtcAec_OverdriveAndSuppress;
+typedef void (*WebRtcAec_ComfortNoise_t)(AecCore* aec,
+ float efw[2][PART_LEN1],
+ complex_t* comfortNoiseHband,
+ const float* noisePow,
+ const float* lambda);
+extern WebRtcAec_ComfortNoise_t WebRtcAec_ComfortNoise;
+
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_CORE_INTERNAL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.c
new file mode 100644
index 00000000000..d861e10f908
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_mips.c
@@ -0,0 +1,774 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * The core AEC algorithm, which is presented with time-aligned signals.
+ */
+
+#include "webrtc/modules/audio_processing/aec/aec_core.h"
+
+#include <math.h>
+
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
+#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
+
+static const int flagHbandCn = 1; // flag for adding comfort noise in H band
+extern const float WebRtcAec_weightCurve[65];
+extern const float WebRtcAec_overDriveCurve[65];
+
+void WebRtcAec_ComfortNoise_mips(AecCore* aec,
+ float efw[2][PART_LEN1],
+ complex_t* comfortNoiseHband,
+ const float* noisePow,
+ const float* lambda) {
+ int i, num;
+ float rand[PART_LEN];
+ float noise, noiseAvg, tmp, tmpAvg;
+ int16_t randW16[PART_LEN];
+ complex_t u[PART_LEN1];
+
+ const float pi2 = 6.28318530717959f;
+ const float pi2t = pi2 / 32768;
+
+ // Generate a uniform random array on [0 1]
+ WebRtcSpl_RandUArray(randW16, PART_LEN, &aec->seed);
+
+ int16_t *randWptr = randW16;
+ float randTemp, randTemp2, randTemp3, randTemp4;
+ short tmp1s, tmp2s, tmp3s, tmp4s;
+
+ for (i = 0; i < PART_LEN; i+=4) {
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "lh %[tmp1s], 0(%[randWptr]) \n\t"
+ "lh %[tmp2s], 2(%[randWptr]) \n\t"
+ "lh %[tmp3s], 4(%[randWptr]) \n\t"
+ "lh %[tmp4s], 6(%[randWptr]) \n\t"
+ "mtc1 %[tmp1s], %[randTemp] \n\t"
+ "mtc1 %[tmp2s], %[randTemp2] \n\t"
+ "mtc1 %[tmp3s], %[randTemp3] \n\t"
+ "mtc1 %[tmp4s], %[randTemp4] \n\t"
+ "cvt.s.w %[randTemp], %[randTemp] \n\t"
+ "cvt.s.w %[randTemp2], %[randTemp2] \n\t"
+ "cvt.s.w %[randTemp3], %[randTemp3] \n\t"
+ "cvt.s.w %[randTemp4], %[randTemp4] \n\t"
+ "addiu %[randWptr], %[randWptr], 8 \n\t"
+ "mul.s %[randTemp], %[randTemp], %[pi2t] \n\t"
+ "mul.s %[randTemp2], %[randTemp2], %[pi2t] \n\t"
+ "mul.s %[randTemp3], %[randTemp3], %[pi2t] \n\t"
+ "mul.s %[randTemp4], %[randTemp4], %[pi2t] \n\t"
+ ".set pop \n\t"
+ : [randWptr] "+r" (randWptr), [randTemp] "=&f" (randTemp),
+ [randTemp2] "=&f" (randTemp2), [randTemp3] "=&f" (randTemp3),
+ [randTemp4] "=&f" (randTemp4), [tmp1s] "=&r" (tmp1s),
+ [tmp2s] "=&r" (tmp2s), [tmp3s] "=&r" (tmp3s),
+ [tmp4s] "=&r" (tmp4s)
+ : [pi2t] "f" (pi2t)
+ : "memory"
+ );
+
+ u[i+1][0] = (float)cos(randTemp);
+ u[i+1][1] = (float)sin(randTemp);
+ u[i+2][0] = (float)cos(randTemp2);
+ u[i+2][1] = (float)sin(randTemp2);
+ u[i+3][0] = (float)cos(randTemp3);
+ u[i+3][1] = (float)sin(randTemp3);
+ u[i+4][0] = (float)cos(randTemp4);
+ u[i+4][1] = (float)sin(randTemp4);
+ }
+
+ // Reject LF noise
+ float *u_ptr = &u[1][0];
+ float noise2, noise3, noise4;
+ float tmp1f, tmp2f, tmp3f, tmp4f, tmp5f, tmp6f, tmp7f, tmp8f;
+
+ u[0][0] = 0;
+ u[0][1] = 0;
+ for (i = 1; i < PART_LEN1; i+=4) {
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "lwc1 %[noise], 4(%[noisePow]) \n\t"
+ "lwc1 %[noise2], 8(%[noisePow]) \n\t"
+ "lwc1 %[noise3], 12(%[noisePow]) \n\t"
+ "lwc1 %[noise4], 16(%[noisePow]) \n\t"
+ "sqrt.s %[noise], %[noise] \n\t"
+ "sqrt.s %[noise2], %[noise2] \n\t"
+ "sqrt.s %[noise3], %[noise3] \n\t"
+ "sqrt.s %[noise4], %[noise4] \n\t"
+ "lwc1 %[tmp1f], 0(%[u_ptr]) \n\t"
+ "lwc1 %[tmp2f], 4(%[u_ptr]) \n\t"
+ "lwc1 %[tmp3f], 8(%[u_ptr]) \n\t"
+ "lwc1 %[tmp4f], 12(%[u_ptr]) \n\t"
+ "lwc1 %[tmp5f], 16(%[u_ptr]) \n\t"
+ "lwc1 %[tmp6f], 20(%[u_ptr]) \n\t"
+ "lwc1 %[tmp7f], 24(%[u_ptr]) \n\t"
+ "lwc1 %[tmp8f], 28(%[u_ptr]) \n\t"
+ "addiu %[noisePow], %[noisePow], 16 \n\t"
+ "mul.s %[tmp1f], %[tmp1f], %[noise] \n\t"
+ "mul.s %[tmp2f], %[tmp2f], %[noise] \n\t"
+ "mul.s %[tmp3f], %[tmp3f], %[noise2] \n\t"
+ "mul.s %[tmp4f], %[tmp4f], %[noise2] \n\t"
+ "mul.s %[tmp5f], %[tmp5f], %[noise3] \n\t"
+ "mul.s %[tmp6f], %[tmp6f], %[noise3] \n\t"
+ "swc1 %[tmp1f], 0(%[u_ptr]) \n\t"
+ "swc1 %[tmp3f], 8(%[u_ptr]) \n\t"
+ "mul.s %[tmp8f], %[tmp8f], %[noise4] \n\t"
+ "mul.s %[tmp7f], %[tmp7f], %[noise4] \n\t"
+ "neg.s %[tmp2f] \n\t"
+ "neg.s %[tmp4f] \n\t"
+ "neg.s %[tmp6f] \n\t"
+ "neg.s %[tmp8f] \n\t"
+ "swc1 %[tmp5f], 16(%[u_ptr]) \n\t"
+ "swc1 %[tmp7f], 24(%[u_ptr]) \n\t"
+ "swc1 %[tmp2f], 4(%[u_ptr]) \n\t"
+ "swc1 %[tmp4f], 12(%[u_ptr]) \n\t"
+ "swc1 %[tmp6f], 20(%[u_ptr]) \n\t"
+ "swc1 %[tmp8f], 28(%[u_ptr]) \n\t"
+ "addiu %[u_ptr], %[u_ptr], 32 \n\t"
+ ".set pop \n\t"
+ : [u_ptr] "+r" (u_ptr), [noisePow] "+r" (noisePow),
+ [noise] "=&f" (noise), [noise2] "=&f" (noise2),
+ [noise3] "=&f" (noise3), [noise4] "=&f" (noise4),
+ [tmp1f] "=&f" (tmp1f), [tmp2f] "=&f" (tmp2f),
+ [tmp3f] "=&f" (tmp3f), [tmp4f] "=&f" (tmp4f),
+ [tmp5f] "=&f" (tmp5f), [tmp6f] "=&f" (tmp6f),
+ [tmp7f] "=&f" (tmp7f), [tmp8f] "=&f" (tmp8f)
+ :
+ : "memory"
+ );
+ }
+ u[PART_LEN][1] = 0;
+ noisePow -= PART_LEN;
+
+ u_ptr = &u[0][0];
+ float *u_ptr_end = &u[PART_LEN][0];
+ float *efw_ptr_0 = &efw[0][0];
+ float *efw_ptr_1 = &efw[1][0];
+ float tmp9f, tmp10f;
+ const float tmp1c = 1.0;
+ const float tmp2c = 0.0;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "1: \n\t"
+ "lwc1 %[tmp1f], 0(%[lambda]) \n\t"
+ "lwc1 %[tmp6f], 4(%[lambda]) \n\t"
+ "addiu %[lambda], %[lambda], 8 \n\t"
+ "c.lt.s %[tmp1f], %[tmp1c] \n\t"
+ "bc1f 4f \n\t"
+ " nop \n\t"
+ "c.lt.s %[tmp6f], %[tmp1c] \n\t"
+ "bc1f 3f \n\t"
+ " nop \n\t"
+ "2: \n\t"
+ "mul.s %[tmp1f], %[tmp1f], %[tmp1f] \n\t"
+ "mul.s %[tmp6f], %[tmp6f], %[tmp6f] \n\t"
+ "sub.s %[tmp1f], %[tmp1c], %[tmp1f] \n\t"
+ "sub.s %[tmp6f], %[tmp1c], %[tmp6f] \n\t"
+ "sqrt.s %[tmp1f], %[tmp1f] \n\t"
+ "sqrt.s %[tmp6f], %[tmp6f] \n\t"
+ "lwc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
+ "lwc1 %[tmp3f], 0(%[u_ptr]) \n\t"
+ "lwc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
+ "lwc1 %[tmp8f], 8(%[u_ptr]) \n\t"
+ "lwc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
+ "lwc1 %[tmp5f], 4(%[u_ptr]) \n\t"
+ "lwc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
+ "lwc1 %[tmp10f], 12(%[u_ptr]) \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[tmp3f], %[tmp1f], %[tmp3f] \n\t"
+ "add.s %[tmp2f], %[tmp2f], %[tmp3f] \n\t"
+ "mul.s %[tmp3f], %[tmp1f], %[tmp5f] \n\t"
+ "add.s %[tmp4f], %[tmp4f], %[tmp3f] \n\t"
+ "mul.s %[tmp3f], %[tmp6f], %[tmp8f] \n\t"
+ "add.s %[tmp7f], %[tmp7f], %[tmp3f] \n\t"
+ "mul.s %[tmp3f], %[tmp6f], %[tmp10f] \n\t"
+ "add.s %[tmp9f], %[tmp9f], %[tmp3f] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[tmp2f], %[tmp2f], %[tmp1f], %[tmp3f] \n\t"
+ "madd.s %[tmp4f], %[tmp4f], %[tmp1f], %[tmp5f] \n\t"
+ "madd.s %[tmp7f], %[tmp7f], %[tmp6f], %[tmp8f] \n\t"
+ "madd.s %[tmp9f], %[tmp9f], %[tmp6f], %[tmp10f] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
+ "swc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
+ "swc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
+ "b 5f \n\t"
+ " swc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
+ "3: \n\t"
+ "mul.s %[tmp1f], %[tmp1f], %[tmp1f] \n\t"
+ "sub.s %[tmp1f], %[tmp1c], %[tmp1f] \n\t"
+ "sqrt.s %[tmp1f], %[tmp1f] \n\t"
+ "lwc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
+ "lwc1 %[tmp3f], 0(%[u_ptr]) \n\t"
+ "lwc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
+ "lwc1 %[tmp5f], 4(%[u_ptr]) \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[tmp3f], %[tmp1f], %[tmp3f] \n\t"
+ "add.s %[tmp2f], %[tmp2f], %[tmp3f] \n\t"
+ "mul.s %[tmp3f], %[tmp1f], %[tmp5f] \n\t"
+ "add.s %[tmp4f], %[tmp4f], %[tmp3f] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[tmp2f], %[tmp2f], %[tmp1f], %[tmp3f] \n\t"
+ "madd.s %[tmp4f], %[tmp4f], %[tmp1f], %[tmp5f] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[tmp2f], 0(%[efw_ptr_0]) \n\t"
+ "b 5f \n\t"
+ " swc1 %[tmp4f], 0(%[efw_ptr_1]) \n\t"
+ "4: \n\t"
+ "c.lt.s %[tmp6f], %[tmp1c] \n\t"
+ "bc1f 5f \n\t"
+ " nop \n\t"
+ "mul.s %[tmp6f], %[tmp6f], %[tmp6f] \n\t"
+ "sub.s %[tmp6f], %[tmp1c], %[tmp6f] \n\t"
+ "sqrt.s %[tmp6f], %[tmp6f] \n\t"
+ "lwc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
+ "lwc1 %[tmp8f], 8(%[u_ptr]) \n\t"
+ "lwc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
+ "lwc1 %[tmp10f], 12(%[u_ptr]) \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[tmp3f], %[tmp6f], %[tmp8f] \n\t"
+ "add.s %[tmp7f], %[tmp7f], %[tmp3f] \n\t"
+ "mul.s %[tmp3f], %[tmp6f], %[tmp10f] \n\t"
+ "add.s %[tmp9f], %[tmp9f], %[tmp3f] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[tmp7f], %[tmp7f], %[tmp6f], %[tmp8f] \n\t"
+ "madd.s %[tmp9f], %[tmp9f], %[tmp6f], %[tmp10f] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[tmp7f], 4(%[efw_ptr_0]) \n\t"
+ "swc1 %[tmp9f], 4(%[efw_ptr_1]) \n\t"
+ "5: \n\t"
+ "addiu %[u_ptr], %[u_ptr], 16 \n\t"
+ "addiu %[efw_ptr_0], %[efw_ptr_0], 8 \n\t"
+ "bne %[u_ptr], %[u_ptr_end], 1b \n\t"
+ " addiu %[efw_ptr_1], %[efw_ptr_1], 8 \n\t"
+ ".set pop \n\t"
+ : [lambda] "+r" (lambda), [u_ptr] "+r" (u_ptr),
+ [efw_ptr_0] "+r" (efw_ptr_0), [efw_ptr_1] "+r" (efw_ptr_1),
+ [tmp1f] "=&f" (tmp1f), [tmp2f] "=&f" (tmp2f), [tmp3f] "=&f" (tmp3f),
+ [tmp4f] "=&f" (tmp4f), [tmp5f] "=&f" (tmp5f),
+ [tmp6f] "=&f" (tmp6f), [tmp7f] "=&f" (tmp7f), [tmp8f] "=&f" (tmp8f),
+ [tmp9f] "=&f" (tmp9f), [tmp10f] "=&f" (tmp10f)
+ : [tmp1c] "f" (tmp1c), [tmp2c] "f" (tmp2c), [u_ptr_end] "r" (u_ptr_end)
+ : "memory"
+ );
+
+ lambda -= PART_LEN;
+ tmp = sqrtf(WEBRTC_SPL_MAX(1 - lambda[PART_LEN] * lambda[PART_LEN], 0));
+ //tmp = 1 - lambda[i];
+ efw[0][PART_LEN] += tmp * u[PART_LEN][0];
+ efw[1][PART_LEN] += tmp * u[PART_LEN][1];
+
+ // For H band comfort noise
+ // TODO: don't compute noise and "tmp" twice. Use the previous results.
+ noiseAvg = 0.0;
+ tmpAvg = 0.0;
+ num = 0;
+ if (aec->sampFreq == 32000 && flagHbandCn == 1) {
+ for (i = 0; i < PART_LEN; i++) {
+ rand[i] = ((float)randW16[i]) / 32768;
+ }
+
+ // average noise scale
+ // average over second half of freq spectrum (i.e., 4->8khz)
+ // TODO: we shouldn't need num. We know how many elements we're summing.
+ for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
+ num++;
+ noiseAvg += sqrtf(noisePow[i]);
+ }
+ noiseAvg /= (float)num;
+
+ // average nlp scale
+ // average over second half of freq spectrum (i.e., 4->8khz)
+ // TODO: we shouldn't need num. We know how many elements we're summing.
+ num = 0;
+ for (i = PART_LEN1 >> 1; i < PART_LEN1; i++) {
+ num++;
+ tmpAvg += sqrtf(WEBRTC_SPL_MAX(1 - lambda[i] * lambda[i], 0));
+ }
+ tmpAvg /= (float)num;
+
+ // Use average noise for H band
+ // TODO: we should probably have a new random vector here.
+ // Reject LF noise
+ u[0][0] = 0;
+ u[0][1] = 0;
+ for (i = 1; i < PART_LEN1; i++) {
+ tmp = pi2 * rand[i - 1];
+
+ // Use average noise for H band
+ u[i][0] = noiseAvg * (float)cos(tmp);
+ u[i][1] = -noiseAvg * (float)sin(tmp);
+ }
+ u[PART_LEN][1] = 0;
+
+ for (i = 0; i < PART_LEN1; i++) {
+ // Use average NLP weight for H band
+ comfortNoiseHband[i][0] = tmpAvg * u[i][0];
+ comfortNoiseHband[i][1] = tmpAvg * u[i][1];
+ }
+ }
+}
+
+void WebRtcAec_FilterFar_mips(AecCore *aec, float yf[2][PART_LEN1]) {
+ int i;
+ for (i = 0; i < aec->num_partitions; i++) {
+ int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
+ int pos = i * PART_LEN1;
+ // Check for wrap
+ if (i + aec->xfBufBlockPos >= aec->num_partitions) {
+ xPos -= aec->num_partitions * (PART_LEN1);
+ }
+ float *yf0 = yf[0];
+ float *yf1 = yf[1];
+ float *aRe = aec->xfBuf[0] + xPos;
+ float *aIm = aec->xfBuf[1] + xPos;
+ float *bRe = aec->wfBuf[0] + pos;
+ float *bIm = aec->wfBuf[1] + pos;
+ float f0, f1, f2, f3, f4, f5, f6, f7, f8, f9, f10, f11, f12, f13;
+ int len = PART_LEN1 >> 1;
+ int len1 = PART_LEN1 & 1;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "1: \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[bRe]) \n\t"
+ "lwc1 %[f2], 0(%[bIm]) \n\t"
+ "lwc1 %[f3], 0(%[aIm]) \n\t"
+ "lwc1 %[f4], 4(%[aRe]) \n\t"
+ "lwc1 %[f5], 4(%[bRe]) \n\t"
+ "lwc1 %[f6], 4(%[bIm]) \n\t"
+ "mul.s %[f8], %[f0], %[f1] \n\t"
+ "mul.s %[f0], %[f0], %[f2] \n\t"
+ "mul.s %[f9], %[f4], %[f5] \n\t"
+ "mul.s %[f4], %[f4], %[f6] \n\t"
+ "lwc1 %[f7], 4(%[aIm]) \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f12], %[f2], %[f3] \n\t"
+ "mul.s %[f1], %[f3], %[f1] \n\t"
+ "mul.s %[f11], %[f6], %[f7] \n\t"
+ "addiu %[aRe], %[aRe], 8 \n\t"
+ "addiu %[aIm], %[aIm], 8 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "sub.s %[f8], %[f8], %[f12] \n\t"
+ "mul.s %[f12], %[f7], %[f5] \n\t"
+ "lwc1 %[f2], 0(%[yf0]) \n\t"
+ "add.s %[f1], %[f0], %[f1] \n\t"
+ "lwc1 %[f3], 0(%[yf1]) \n\t"
+ "sub.s %[f9], %[f9], %[f11] \n\t"
+ "lwc1 %[f6], 4(%[yf0]) \n\t"
+ "add.s %[f4], %[f4], %[f12] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "addiu %[aRe], %[aRe], 8 \n\t"
+ "addiu %[aIm], %[aIm], 8 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "nmsub.s %[f8], %[f8], %[f2], %[f3] \n\t"
+ "lwc1 %[f2], 0(%[yf0]) \n\t"
+ "madd.s %[f1], %[f0], %[f3], %[f1] \n\t"
+ "lwc1 %[f3], 0(%[yf1]) \n\t"
+ "nmsub.s %[f9], %[f9], %[f6], %[f7] \n\t"
+ "lwc1 %[f6], 4(%[yf0]) \n\t"
+ "madd.s %[f4], %[f4], %[f7], %[f5] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "lwc1 %[f5], 4(%[yf1]) \n\t"
+ "add.s %[f2], %[f2], %[f8] \n\t"
+ "addiu %[bRe], %[bRe], 8 \n\t"
+ "addiu %[bIm], %[bIm], 8 \n\t"
+ "add.s %[f3], %[f3], %[f1] \n\t"
+ "add.s %[f6], %[f6], %[f9] \n\t"
+ "add.s %[f5], %[f5], %[f4] \n\t"
+ "swc1 %[f2], 0(%[yf0]) \n\t"
+ "swc1 %[f3], 0(%[yf1]) \n\t"
+ "swc1 %[f6], 4(%[yf0]) \n\t"
+ "swc1 %[f5], 4(%[yf1]) \n\t"
+ "addiu %[yf0], %[yf0], 8 \n\t"
+ "bgtz %[len], 1b \n\t"
+ " addiu %[yf1], %[yf1], 8 \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[bRe]) \n\t"
+ "lwc1 %[f2], 0(%[bIm]) \n\t"
+ "lwc1 %[f3], 0(%[aIm]) \n\t"
+ "mul.s %[f8], %[f0], %[f1] \n\t"
+ "mul.s %[f0], %[f0], %[f2] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f12], %[f2], %[f3] \n\t"
+ "mul.s %[f1], %[f3], %[f1] \n\t"
+ "sub.s %[f8], %[f8], %[f12] \n\t"
+ "lwc1 %[f2], 0(%[yf0]) \n\t"
+ "add.s %[f1], %[f0], %[f1] \n\t"
+ "lwc1 %[f3], 0(%[yf1]) \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmsub.s %[f8], %[f8], %[f2], %[f3] \n\t"
+ "lwc1 %[f2], 0(%[yf0]) \n\t"
+ "madd.s %[f1], %[f0], %[f3], %[f1] \n\t"
+ "lwc1 %[f3], 0(%[yf1]) \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "add.s %[f2], %[f2], %[f8] \n\t"
+ "add.s %[f3], %[f3], %[f1] \n\t"
+ "swc1 %[f2], 0(%[yf0]) \n\t"
+ "swc1 %[f3], 0(%[yf1]) \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2),
+ [f3] "=&f" (f3), [f4] "=&f" (f4), [f5] "=&f" (f5),
+ [f6] "=&f" (f6), [f7] "=&f" (f7), [f8] "=&f" (f8),
+ [f9] "=&f" (f9), [f10] "=&f" (f10), [f11] "=&f" (f11),
+ [f12] "=&f" (f12), [f13] "=&f" (f13), [aRe] "+r" (aRe),
+ [aIm] "+r" (aIm), [bRe] "+r" (bRe), [bIm] "+r" (bIm),
+ [yf0] "+r" (yf0), [yf1] "+r" (yf1), [len] "+r" (len)
+ : [len1] "r" (len1)
+ : "memory"
+ );
+ }
+}
+
+void WebRtcAec_FilterAdaptation_mips(AecCore *aec,
+ float *fft,
+ float ef[2][PART_LEN1]) {
+ int i;
+ for (i = 0; i < aec->num_partitions; i++) {
+ int xPos = (i + aec->xfBufBlockPos)*(PART_LEN1);
+ int pos;
+ // Check for wrap
+ if (i + aec->xfBufBlockPos >= aec->num_partitions) {
+ xPos -= aec->num_partitions * PART_LEN1;
+ }
+
+ pos = i * PART_LEN1;
+ float *aRe = aec->xfBuf[0] + xPos;
+ float *aIm = aec->xfBuf[1] + xPos;
+ float *bRe = ef[0];
+ float *bIm = ef[1];
+ float *fft_tmp = fft;
+
+ float f0, f1, f2, f3, f4, f5, f6 ,f7, f8, f9, f10, f11, f12;
+ int len = PART_LEN >> 1;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "1: \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[bRe]) \n\t"
+ "lwc1 %[f2], 0(%[bIm]) \n\t"
+ "lwc1 %[f4], 4(%[aRe]) \n\t"
+ "lwc1 %[f5], 4(%[bRe]) \n\t"
+ "lwc1 %[f6], 4(%[bIm]) \n\t"
+ "addiu %[aRe], %[aRe], 8 \n\t"
+ "addiu %[bRe], %[bRe], 8 \n\t"
+ "mul.s %[f8], %[f0], %[f1] \n\t"
+ "mul.s %[f0], %[f0], %[f2] \n\t"
+ "lwc1 %[f3], 0(%[aIm]) \n\t"
+ "mul.s %[f9], %[f4], %[f5] \n\t"
+ "lwc1 %[f7], 4(%[aIm]) \n\t"
+ "mul.s %[f4], %[f4], %[f6] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f10], %[f3], %[f2] \n\t"
+ "mul.s %[f1], %[f3], %[f1] \n\t"
+ "mul.s %[f11], %[f7], %[f6] \n\t"
+ "mul.s %[f5], %[f7], %[f5] \n\t"
+ "addiu %[aIm], %[aIm], 8 \n\t"
+ "addiu %[bIm], %[bIm], 8 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "add.s %[f8], %[f8], %[f10] \n\t"
+ "sub.s %[f1], %[f0], %[f1] \n\t"
+ "add.s %[f9], %[f9], %[f11] \n\t"
+ "sub.s %[f5], %[f4], %[f5] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "addiu %[aIm], %[aIm], 8 \n\t"
+ "addiu %[bIm], %[bIm], 8 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "madd.s %[f8], %[f8], %[f3], %[f2] \n\t"
+ "nmsub.s %[f1], %[f0], %[f3], %[f1] \n\t"
+ "madd.s %[f9], %[f9], %[f7], %[f6] \n\t"
+ "nmsub.s %[f5], %[f4], %[f7], %[f5] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[f8], 0(%[fft_tmp]) \n\t"
+ "swc1 %[f1], 4(%[fft_tmp]) \n\t"
+ "swc1 %[f9], 8(%[fft_tmp]) \n\t"
+ "swc1 %[f5], 12(%[fft_tmp]) \n\t"
+ "bgtz %[len], 1b \n\t"
+ " addiu %[fft_tmp], %[fft_tmp], 16 \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[bRe]) \n\t"
+ "lwc1 %[f2], 0(%[bIm]) \n\t"
+ "lwc1 %[f3], 0(%[aIm]) \n\t"
+ "mul.s %[f8], %[f0], %[f1] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f10], %[f3], %[f2] \n\t"
+ "add.s %[f8], %[f8], %[f10] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[f8], %[f8], %[f3], %[f2] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[f8], 4(%[fft]) \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2),
+ [f3] "=&f" (f3), [f4] "=&f" (f4), [f5] "=&f" (f5),
+ [f6] "=&f" (f6), [f7] "=&f" (f7), [f8] "=&f" (f8),
+ [f9] "=&f" (f9), [f10] "=&f" (f10), [f11] "=&f" (f11),
+ [f12] "=&f" (f12), [aRe] "+r" (aRe), [aIm] "+r" (aIm),
+ [bRe] "+r" (bRe), [bIm] "+r" (bIm), [fft_tmp] "+r" (fft_tmp),
+ [len] "+r" (len), [fft] "=&r" (fft)
+ :
+ : "memory"
+ );
+
+ aec_rdft_inverse_128(fft);
+ memset(fft + PART_LEN, 0, sizeof(float) * PART_LEN);
+
+ // fft scaling
+ {
+ float scale = 2.0f / PART_LEN2;
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[fft_tmp], %[fft], 0 \n\t"
+ "addiu %[len], $zero, 8 \n\t"
+ "1: \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "lwc1 %[f0], 0(%[fft_tmp]) \n\t"
+ "lwc1 %[f1], 4(%[fft_tmp]) \n\t"
+ "lwc1 %[f2], 8(%[fft_tmp]) \n\t"
+ "lwc1 %[f3], 12(%[fft_tmp]) \n\t"
+ "mul.s %[f0], %[f0], %[scale] \n\t"
+ "mul.s %[f1], %[f1], %[scale] \n\t"
+ "mul.s %[f2], %[f2], %[scale] \n\t"
+ "mul.s %[f3], %[f3], %[scale] \n\t"
+ "lwc1 %[f4], 16(%[fft_tmp]) \n\t"
+ "lwc1 %[f5], 20(%[fft_tmp]) \n\t"
+ "lwc1 %[f6], 24(%[fft_tmp]) \n\t"
+ "lwc1 %[f7], 28(%[fft_tmp]) \n\t"
+ "mul.s %[f4], %[f4], %[scale] \n\t"
+ "mul.s %[f5], %[f5], %[scale] \n\t"
+ "mul.s %[f6], %[f6], %[scale] \n\t"
+ "mul.s %[f7], %[f7], %[scale] \n\t"
+ "swc1 %[f0], 0(%[fft_tmp]) \n\t"
+ "swc1 %[f1], 4(%[fft_tmp]) \n\t"
+ "swc1 %[f2], 8(%[fft_tmp]) \n\t"
+ "swc1 %[f3], 12(%[fft_tmp]) \n\t"
+ "swc1 %[f4], 16(%[fft_tmp]) \n\t"
+ "swc1 %[f5], 20(%[fft_tmp]) \n\t"
+ "swc1 %[f6], 24(%[fft_tmp]) \n\t"
+ "swc1 %[f7], 28(%[fft_tmp]) \n\t"
+ "bgtz %[len], 1b \n\t"
+ " addiu %[fft_tmp], %[fft_tmp], 32 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2),
+ [f3] "=&f" (f3), [f4] "=&f" (f4), [f5] "=&f" (f5),
+ [f6] "=&f" (f6), [f7] "=&f" (f7), [len] "=&r" (len),
+ [fft_tmp] "=&r" (fft_tmp)
+ : [scale] "f" (scale), [fft] "r" (fft)
+ : "memory"
+ );
+ }
+ aec_rdft_forward_128(fft);
+ aRe = aec->wfBuf[0] + pos;
+ aIm = aec->wfBuf[1] + pos;
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[fft_tmp], %[fft], 0 \n\t"
+ "addiu %[len], $zero, 31 \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[fft_tmp]) \n\t"
+ "lwc1 %[f2], 256(%[aRe]) \n\t"
+ "lwc1 %[f3], 4(%[fft_tmp]) \n\t"
+ "lwc1 %[f4], 4(%[aRe]) \n\t"
+ "lwc1 %[f5], 8(%[fft_tmp]) \n\t"
+ "lwc1 %[f6], 4(%[aIm]) \n\t"
+ "lwc1 %[f7], 12(%[fft_tmp]) \n\t"
+ "add.s %[f0], %[f0], %[f1] \n\t"
+ "add.s %[f2], %[f2], %[f3] \n\t"
+ "add.s %[f4], %[f4], %[f5] \n\t"
+ "add.s %[f6], %[f6], %[f7] \n\t"
+ "addiu %[fft_tmp], %[fft_tmp], 16 \n\t"
+ "swc1 %[f0], 0(%[aRe]) \n\t"
+ "swc1 %[f2], 256(%[aRe]) \n\t"
+ "swc1 %[f4], 4(%[aRe]) \n\t"
+ "addiu %[aRe], %[aRe], 8 \n\t"
+ "swc1 %[f6], 4(%[aIm]) \n\t"
+ "addiu %[aIm], %[aIm], 8 \n\t"
+ "1: \n\t"
+ "lwc1 %[f0], 0(%[aRe]) \n\t"
+ "lwc1 %[f1], 0(%[fft_tmp]) \n\t"
+ "lwc1 %[f2], 0(%[aIm]) \n\t"
+ "lwc1 %[f3], 4(%[fft_tmp]) \n\t"
+ "lwc1 %[f4], 4(%[aRe]) \n\t"
+ "lwc1 %[f5], 8(%[fft_tmp]) \n\t"
+ "lwc1 %[f6], 4(%[aIm]) \n\t"
+ "lwc1 %[f7], 12(%[fft_tmp]) \n\t"
+ "add.s %[f0], %[f0], %[f1] \n\t"
+ "add.s %[f2], %[f2], %[f3] \n\t"
+ "add.s %[f4], %[f4], %[f5] \n\t"
+ "add.s %[f6], %[f6], %[f7] \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "addiu %[fft_tmp], %[fft_tmp], 16 \n\t"
+ "swc1 %[f0], 0(%[aRe]) \n\t"
+ "swc1 %[f2], 0(%[aIm]) \n\t"
+ "swc1 %[f4], 4(%[aRe]) \n\t"
+ "addiu %[aRe], %[aRe], 8 \n\t"
+ "swc1 %[f6], 4(%[aIm]) \n\t"
+ "bgtz %[len], 1b \n\t"
+ " addiu %[aIm], %[aIm], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2),
+ [f3] "=&f" (f3), [f4] "=&f" (f4), [f5] "=&f" (f5),
+ [f6] "=&f" (f6), [f7] "=&f" (f7), [len] "=&r" (len),
+ [fft_tmp] "=&r" (fft_tmp)
+ : [aRe] "r" (aRe), [aIm] "r" (aIm), [fft] "r" (fft)
+ : "memory"
+ );
+ }
+}
+
+// MIPS-optimized version of WebRtcAec_OverdriveAndSuppress().
+// For every frequency bin i:
+//   1. If the suppression gain hNl[i] exceeds the feedback gain hNlFb, blend
+//      it toward hNlFb using WebRtcAec_weightCurve[i] (first asm section).
+//   2. Apply the overdrive power curve with powf().
+//   3. Scale the error spectrum efw by the gain; the imaginary part is
+//      negated because the Ooura FFT returns it with inverted sign
+//      (second asm section).
+void WebRtcAec_OverdriveAndSuppress_mips(AecCore *aec,
+                                         float hNl[PART_LEN1],
+                                         const float hNlFb,
+                                         float efw[2][PART_LEN1]) {
+  int i;
+  const float one = 1.0;
+  float *p_hNl, *p_efw0, *p_efw1;
+  float *p_WebRtcAec_wC;
+  float temp1, temp2, temp3, temp4;
+
+  p_hNl = &hNl[0];
+  p_efw0 = &efw[0][0];
+  p_efw1 = &efw[1][0];
+  // Cast drops the const qualifier; the weight curve is only read below.
+  p_WebRtcAec_wC = (float*)&WebRtcAec_weightCurve[0];
+
+  for (i = 0; i < PART_LEN1; i++) {
+    // Weight subbands
+    __asm __volatile (
+      ".set push \n\t"
+      ".set noreorder \n\t"
+      "lwc1 %[temp1], 0(%[p_hNl]) \n\t"
+      "lwc1 %[temp2], 0(%[p_wC]) \n\t"
+      // Skip the blend unless hNlFb < hNl[i].
+      "c.lt.s %[hNlFb], %[temp1] \n\t"
+      "bc1f 1f \n\t"
+      " mul.s %[temp3], %[temp2], %[hNlFb] \n\t"
+      "sub.s %[temp4], %[one], %[temp2] \n\t"
+#if !defined(MIPS32_R2_LE)
+      "mul.s %[temp1], %[temp1], %[temp4] \n\t"
+      "add.s %[temp1], %[temp3], %[temp1] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+      // Fused multiply-add available from MIPS32R2 on.
+      "madd.s %[temp1], %[temp3], %[temp1], %[temp4] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+      "swc1 %[temp1], 0(%[p_hNl]) \n\t"
+      "1: \n\t"
+      "addiu %[p_wC], %[p_wC], 4 \n\t"
+      ".set pop \n\t"
+      : [temp1] "=&f" (temp1), [temp2] "=&f" (temp2), [temp3] "=&f" (temp3),
+        [temp4] "=&f" (temp4), [p_wC] "+r" (p_WebRtcAec_wC)
+      : [hNlFb] "f" (hNlFb), [one] "f" (one), [p_hNl] "r" (p_hNl)
+      : "memory"
+    );
+
+    // Overdrive: raise the gain to the smoothed overdrive power curve.
+    hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+
+    // Suppress the error signal: efw[0][i] *= hNl[i]; efw[1][i] *= -hNl[i]
+    // (imaginary part negated to undo the Ooura FFT sign convention).
+    __asm __volatile (
+      "lwc1 %[temp1], 0(%[p_hNl]) \n\t"
+      "lwc1 %[temp3], 0(%[p_efw1]) \n\t"
+      "lwc1 %[temp2], 0(%[p_efw0]) \n\t"
+      "addiu %[p_hNl], %[p_hNl], 4 \n\t"
+      "mul.s %[temp3], %[temp3], %[temp1] \n\t"
+      "mul.s %[temp2], %[temp2], %[temp1] \n\t"
+      "addiu %[p_efw0], %[p_efw0], 4 \n\t"
+      "addiu %[p_efw1], %[p_efw1], 4 \n\t"
+      "neg.s %[temp4], %[temp3] \n\t"
+      "swc1 %[temp2], -4(%[p_efw0]) \n\t"
+      "swc1 %[temp4], -4(%[p_efw1]) \n\t"
+      : [temp1] "=&f" (temp1), [temp2] "=&f" (temp2), [temp3] "=&f" (temp3),
+        [temp4] "=&f" (temp4), [p_efw0] "+r" (p_efw0), [p_efw1] "+r" (p_efw1),
+        [p_hNl] "+r" (p_hNl)
+      :
+      : "memory"
+    );
+  }
+}
+
+// MIPS-optimized version of WebRtcAec_ScaleErrorSignal().
+// Normalizes each bin of the error spectrum ef by the far-end power
+// (xPow[i] + 1e-10), clamps the complex magnitude to error_threshold,
+// then scales the result by the adaptation step size mu.
+void WebRtcAec_ScaleErrorSignal_mips(AecCore *aec, float ef[2][PART_LEN1]) {
+  const float mu = aec->extended_filter_enabled ? kExtendedMu : aec->normal_mu;
+  const float error_threshold = aec->extended_filter_enabled
+                                    ? kExtendedErrorThreshold
+                                    : aec->normal_error_threshold;
+  int len = (PART_LEN1);
+  float *ef0 = ef[0];
+  float *ef1 = ef[1];
+  float *xPow = aec->xPow;
+  // Regularizer added to xPow (and to the magnitude) to avoid division by 0.
+  float fac1 = 1e-10f;
+  // Threshold compared against the squared magnitude, saving a sqrt in the
+  // common (under-threshold) case.
+  float err_th2 = error_threshold * error_threshold;
+  float f0, f1, f2;
+#if !defined(MIPS32_R2_LE)
+  float f3;
+#endif
+
+  __asm __volatile (
+    ".set push \n\t"
+    ".set noreorder \n\t"
+    "1: \n\t"
+    "lwc1 %[f0], 0(%[xPow]) \n\t"
+    "lwc1 %[f1], 0(%[ef0]) \n\t"
+    "lwc1 %[f2], 0(%[ef1]) \n\t"
+    "add.s %[f0], %[f0], %[fac1] \n\t"
+    // (f1, f2) = ef[i] / (xPow[i] + 1e-10).
+    "div.s %[f1], %[f1], %[f0] \n\t"
+    "div.s %[f2], %[f2], %[f0] \n\t"
+    "mul.s %[f0], %[f1], %[f1] \n\t"
+#if defined(MIPS32_R2_LE)
+    "madd.s %[f0], %[f0], %[f2], %[f2] \n\t"
+#else
+    "mul.s %[f3], %[f2], %[f2] \n\t"
+    "add.s %[f0], %[f0], %[f3] \n\t"
+#endif
+    // f0 now holds |ef[i]|^2; skip rescaling when it is within threshold.
+    "c.le.s %[f0], %[err_th2] \n\t"
+    "nop \n\t"
+    "bc1t 2f \n\t"
+    " nop \n\t"
+    // Over threshold: scale the magnitude down to error_threshold.
+    "sqrt.s %[f0], %[f0] \n\t"
+    "add.s %[f0], %[f0], %[fac1] \n\t"
+    "div.s %[f0], %[err_th], %[f0] \n\t"
+    "mul.s %[f1], %[f1], %[f0] \n\t"
+    "mul.s %[f2], %[f2], %[f0] \n\t"
+    "2: \n\t"
+    "mul.s %[f1], %[f1], %[mu] \n\t"
+    "mul.s %[f2], %[f2], %[mu] \n\t"
+    "swc1 %[f1], 0(%[ef0]) \n\t"
+    "swc1 %[f2], 0(%[ef1]) \n\t"
+    "addiu %[len], %[len], -1 \n\t"
+    "addiu %[xPow], %[xPow], 4 \n\t"
+    "addiu %[ef0], %[ef0], 4 \n\t"
+    "bgtz %[len], 1b \n\t"
+    " addiu %[ef1], %[ef1], 4 \n\t"
+    ".set pop \n\t"
+    : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2),
+#if !defined(MIPS32_R2_LE)
+      [f3] "=&f" (f3),
+#endif
+      [xPow] "+r" (xPow), [ef0] "+r" (ef0), [ef1] "+r" (ef1),
+      [len] "+r" (len)
+    : [fac1] "f" (fac1), [err_th2] "f" (err_th2), [mu] "f" (mu),
+      [err_th] "f" (error_threshold)
+    : "memory"
+  );
+}
+
+// Installs the MIPS-optimized implementations into the AEC function pointers.
+void WebRtcAec_InitAec_mips(void)
+{
+  WebRtcAec_FilterFar = WebRtcAec_FilterFar_mips;
+  WebRtcAec_FilterAdaptation = WebRtcAec_FilterAdaptation_mips;
+  WebRtcAec_ScaleErrorSignal = WebRtcAec_ScaleErrorSignal_mips;
+  WebRtcAec_ComfortNoise = WebRtcAec_ComfortNoise_mips;
+  WebRtcAec_OverdriveAndSuppress = WebRtcAec_OverdriveAndSuppress_mips;
+}
+
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.c
new file mode 100644
index 00000000000..cec0a7e3379
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_core_neon.c
@@ -0,0 +1,304 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * The core AEC algorithm, neon version of speed-critical functions.
+ *
+ * Based on aec_core_sse2.c.
+ */
+
+#include "webrtc/modules/audio_processing/aec/aec_core.h"
+
+#include <arm_neon.h>
+#include <math.h>
+#include <string.h> // memset
+
+#include "webrtc/modules/audio_processing/aec/aec_core_internal.h"
+#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
+
+enum { kShiftExponentIntoTopMantissa = 8 };
+enum { kFloatExponentShift = 23 };
+
+// Returns the real part of the complex product (aRe + i*aIm) * (bRe + i*bIm).
+__inline static float MulRe(float aRe, float aIm, float bRe, float bIm) {
+  return aRe * bRe - aIm * bIm;
+}
+
+// NEON version of the NLMS filter-adaptation step (see aec_core_sse2.c).
+// For each partition: correlate conjugate(xfBuf) with the error spectrum ef,
+// take the result to the time domain, zero its second half (gradient
+// constraint), transform back, and accumulate into the filter weights wfBuf.
+// |fft| is a scratch buffer of at least PART_LEN2 floats.
+static void FilterAdaptationNEON(AecCore* aec,
+                                 float* fft,
+                                 float ef[2][PART_LEN1]) {
+  int i;
+  const int num_partitions = aec->num_partitions;
+  for (i = 0; i < num_partitions; i++) {
+    int xPos = (i + aec->xfBufBlockPos) * PART_LEN1;
+    int pos = i * PART_LEN1;
+    int j;
+    // Check for wrap
+    if (i + aec->xfBufBlockPos >= num_partitions) {
+      xPos -= num_partitions * PART_LEN1;
+    }
+
+    // Process the whole array...
+    for (j = 0; j < PART_LEN; j += 4) {
+      // Load xfBuf and ef.
+      const float32x4_t xfBuf_re = vld1q_f32(&aec->xfBuf[0][xPos + j]);
+      const float32x4_t xfBuf_im = vld1q_f32(&aec->xfBuf[1][xPos + j]);
+      const float32x4_t ef_re = vld1q_f32(&ef[0][j]);
+      const float32x4_t ef_im = vld1q_f32(&ef[1][j]);
+      // Calculate the product of conjugate(xfBuf) by ef.
+      //   re(conjugate(a) * b) = aRe * bRe + aIm * bIm
+      //   im(conjugate(a) * b) = aRe * bIm - aIm * bRe
+      const float32x4_t a = vmulq_f32(xfBuf_re, ef_re);
+      const float32x4_t e = vmlaq_f32(a, xfBuf_im, ef_im);
+      const float32x4_t c = vmulq_f32(xfBuf_re, ef_im);
+      const float32x4_t f = vmlsq_f32(c, xfBuf_im, ef_re);
+      // Interleave real and imaginary parts into Ooura's packed layout.
+      const float32x4x2_t g_n_h = vzipq_f32(e, f);
+      // Store
+      vst1q_f32(&fft[2 * j + 0], g_n_h.val[0]);
+      vst1q_f32(&fft[2 * j + 4], g_n_h.val[1]);
+    }
+    // ... and fixup the first imaginary entry, which in the packed layout
+    // holds the real value of the Nyquist bin (index PART_LEN).
+    fft[1] = MulRe(aec->xfBuf[0][xPos + PART_LEN],
+                   -aec->xfBuf[1][xPos + PART_LEN],
+                   ef[0][PART_LEN],
+                   ef[1][PART_LEN]);
+
+    // Gradient constraint: zero the second half of the impulse response.
+    aec_rdft_inverse_128(fft);
+    memset(fft + PART_LEN, 0, sizeof(float) * PART_LEN);
+
+    // fft scaling
+    {
+      const float scale = 2.0f / PART_LEN2;
+      const float32x4_t scale_ps = vmovq_n_f32(scale);
+      for (j = 0; j < PART_LEN; j += 4) {
+        const float32x4_t fft_ps = vld1q_f32(&fft[j]);
+        const float32x4_t fft_scale = vmulq_f32(fft_ps, scale_ps);
+        vst1q_f32(&fft[j], fft_scale);
+      }
+    }
+    aec_rdft_forward_128(fft);
+
+    {
+      // fft[1] belongs to wfBuf[0][pos + PART_LEN] (Nyquist real value);
+      // the vector loop below would wrongly add it to wfBuf[1][pos], so
+      // save that entry and restore it afterwards.
+      const float wt1 = aec->wfBuf[1][pos];
+      aec->wfBuf[0][pos + PART_LEN] += fft[1];
+      for (j = 0; j < PART_LEN; j += 4) {
+        float32x4_t wtBuf_re = vld1q_f32(&aec->wfBuf[0][pos + j]);
+        float32x4_t wtBuf_im = vld1q_f32(&aec->wfBuf[1][pos + j]);
+        const float32x4_t fft0 = vld1q_f32(&fft[2 * j + 0]);
+        const float32x4_t fft4 = vld1q_f32(&fft[2 * j + 4]);
+        // De-interleave back into separate real/imaginary vectors.
+        const float32x4x2_t fft_re_im = vuzpq_f32(fft0, fft4);
+        wtBuf_re = vaddq_f32(wtBuf_re, fft_re_im.val[0]);
+        wtBuf_im = vaddq_f32(wtBuf_im, fft_re_im.val[1]);
+
+        vst1q_f32(&aec->wfBuf[0][pos + j], wtBuf_re);
+        vst1q_f32(&aec->wfBuf[1][pos + j], wtBuf_im);
+      }
+      aec->wfBuf[1][pos] = wt1;
+    }
+  }
+}
+
+extern const float WebRtcAec_weightCurve[65];
+extern const float WebRtcAec_overDriveCurve[65];
+
+// Element-wise approximation of powf: computes a^b for four lanes as
+// exp2(b * log2(a)) using polynomial approximations of log2 and exp2.
+// NOTE(review): the exponent/mantissa bit tricks below assume a is a
+// positive, normal float — presumably guaranteed by the suppression gains
+// passed in; verify against callers.
+static float32x4_t vpowq_f32(float32x4_t a, float32x4_t b) {
+  // a^b = exp2(b * log2(a))
+  // exp2(x) and log2(x) are calculated using polynomial approximations.
+  float32x4_t log2_a, b_log2_a, a_exp_b;
+
+  // Calculate log2(x), x = a.
+  {
+    // To calculate log2(x), we decompose x like this:
+    //   x = y * 2^n
+    //     n is an integer
+    //     y is in the [1.0, 2.0) range
+    //
+    //   log2(x) = log2(y) + n
+    //     n       can be evaluated by playing with float representation.
+    //     log2(y) in a small range can be approximated, this code uses an order
+    //             five polynomial approximation. The coefficients have been
+    //             estimated with the Remez algorithm and the resulting
+    //             polynomial has a maximum relative error of 0.00086%.
+
+    // Compute n.
+    //    This is done by masking the exponent, shifting it into the top bit of
+    //    the mantissa, putting eight into the biased exponent (to shift/
+    //    compensate the fact that the exponent has been shifted in the top/
+    //    fractional part and finally getting rid of the implicit leading one
+    //    from the mantissa by subtracting it out.
+    const uint32x4_t vec_float_exponent_mask = vdupq_n_u32(0x7F800000);
+    const uint32x4_t vec_eight_biased_exponent = vdupq_n_u32(0x43800000);
+    const uint32x4_t vec_implicit_leading_one = vdupq_n_u32(0x43BF8000);
+    const uint32x4_t two_n = vandq_u32(vreinterpretq_u32_f32(a),
+                                       vec_float_exponent_mask);
+    const uint32x4_t n_1 = vshrq_n_u32(two_n, kShiftExponentIntoTopMantissa);
+    const uint32x4_t n_0 = vorrq_u32(n_1, vec_eight_biased_exponent);
+    const float32x4_t n =
+        vsubq_f32(vreinterpretq_f32_u32(n_0),
+                  vreinterpretq_f32_u32(vec_implicit_leading_one));
+    // Compute y.
+    const uint32x4_t vec_mantissa_mask = vdupq_n_u32(0x007FFFFF);
+    const uint32x4_t vec_zero_biased_exponent_is_one = vdupq_n_u32(0x3F800000);
+    const uint32x4_t mantissa = vandq_u32(vreinterpretq_u32_f32(a),
+                                          vec_mantissa_mask);
+    const float32x4_t y =
+        vreinterpretq_f32_u32(vorrq_u32(mantissa,
+                                        vec_zero_biased_exponent_is_one));
+    // Approximate log2(y) ~= (y - 1) * pol5(y).
+    //    pol5(y) = C5 * y^5 + C4 * y^4 + C3 * y^3 + C2 * y^2 + C1 * y + C0
+    const float32x4_t C5 = vdupq_n_f32(-3.4436006e-2f);
+    const float32x4_t C4 = vdupq_n_f32(3.1821337e-1f);
+    const float32x4_t C3 = vdupq_n_f32(-1.2315303f);
+    const float32x4_t C2 = vdupq_n_f32(2.5988452f);
+    const float32x4_t C1 = vdupq_n_f32(-3.3241990f);
+    const float32x4_t C0 = vdupq_n_f32(3.1157899f);
+    // Horner evaluation of pol5(y) with fused multiply-accumulate.
+    float32x4_t pol5_y = C5;
+    pol5_y = vmlaq_f32(C4, y, pol5_y);
+    pol5_y = vmlaq_f32(C3, y, pol5_y);
+    pol5_y = vmlaq_f32(C2, y, pol5_y);
+    pol5_y = vmlaq_f32(C1, y, pol5_y);
+    pol5_y = vmlaq_f32(C0, y, pol5_y);
+    const float32x4_t y_minus_one =
+        vsubq_f32(y, vreinterpretq_f32_u32(vec_zero_biased_exponent_is_one));
+    const float32x4_t log2_y = vmulq_f32(y_minus_one, pol5_y);
+
+    // Combine parts.
+    log2_a = vaddq_f32(n, log2_y);
+  }
+
+  // b * log2(a)
+  b_log2_a = vmulq_f32(b, log2_a);
+
+  // Calculate exp2(x), x = b * log2(a).
+  {
+    // To calculate 2^x, we decompose x like this:
+    //   x = n + y
+    //     n is an integer, the value of x - 0.5 rounded down, therefore
+    //     y is in the [0.5, 1.5) range
+    //
+    //   2^x = 2^n * 2^y
+    //     2^n can be evaluated by playing with float representation.
+    //     2^y in a small range can be approximated, this code uses an order two
+    //         polynomial approximation. The coefficients have been estimated
+    //         with the Remez algorithm and the resulting polynomial has a
+    //         maximum relative error of 0.17%.
+    // To avoid over/underflow, we reduce the range of input to ]-127, 129].
+    const float32x4_t max_input = vdupq_n_f32(129.f);
+    const float32x4_t min_input = vdupq_n_f32(-126.99999f);
+    const float32x4_t x_min = vminq_f32(b_log2_a, max_input);
+    const float32x4_t x_max = vmaxq_f32(x_min, min_input);
+    // Compute n.
+    const float32x4_t half = vdupq_n_f32(0.5f);
+    const float32x4_t x_minus_half = vsubq_f32(x_max, half);
+    const int32x4_t x_minus_half_floor = vcvtq_s32_f32(x_minus_half);
+
+    // Compute 2^n.
+    const int32x4_t float_exponent_bias = vdupq_n_s32(127);
+    const int32x4_t two_n_exponent =
+        vaddq_s32(x_minus_half_floor, float_exponent_bias);
+    const float32x4_t two_n =
+        vreinterpretq_f32_s32(vshlq_n_s32(two_n_exponent, kFloatExponentShift));
+    // Compute y.
+    const float32x4_t y = vsubq_f32(x_max, vcvtq_f32_s32(x_minus_half_floor));
+
+    // Approximate 2^y ~= C2 * y^2 + C1 * y + C0.
+    const float32x4_t C2 = vdupq_n_f32(3.3718944e-1f);
+    const float32x4_t C1 = vdupq_n_f32(6.5763628e-1f);
+    const float32x4_t C0 = vdupq_n_f32(1.0017247f);
+    float32x4_t exp2_y = C2;
+    exp2_y = vmlaq_f32(C1, y, exp2_y);
+    exp2_y = vmlaq_f32(C0, y, exp2_y);
+
+    // Combine parts.
+    a_exp_b = vmulq_f32(exp2_y, two_n);
+  }
+
+  return a_exp_b;
+}
+
+// NEON version of WebRtcAec_OverdriveAndSuppress: blends each suppression
+// gain hNl[i] toward the feedback gain hNlFb where hNl[i] > hNlFb, applies
+// the overdrive power curve via vpowq_f32, and scales the error spectrum
+// efw by the gain (imaginary part negated for the Ooura FFT convention).
+// Processes four bins per iteration; the PART_LEN1-th (odd) bin is handled
+// by the scalar tail loop.
+static void OverdriveAndSuppressNEON(AecCore* aec,
+                                     float hNl[PART_LEN1],
+                                     const float hNlFb,
+                                     float efw[2][PART_LEN1]) {
+  int i;
+  const float32x4_t vec_hNlFb = vmovq_n_f32(hNlFb);
+  const float32x4_t vec_one = vdupq_n_f32(1.0f);
+  const float32x4_t vec_minus_one = vdupq_n_f32(-1.0f);
+  const float32x4_t vec_overDriveSm = vmovq_n_f32(aec->overDriveSm);
+
+  // vectorized code (four at once)
+  for (i = 0; i + 3 < PART_LEN1; i += 4) {
+    // Weight subbands
+    float32x4_t vec_hNl = vld1q_f32(&hNl[i]);
+    const float32x4_t vec_weightCurve = vld1q_f32(&WebRtcAec_weightCurve[i]);
+    const uint32x4_t bigger = vcgtq_f32(vec_hNl, vec_hNlFb);
+    const float32x4_t vec_weightCurve_hNlFb = vmulq_f32(vec_weightCurve,
+                                                        vec_hNlFb);
+    const float32x4_t vec_one_weightCurve = vsubq_f32(vec_one, vec_weightCurve);
+    const float32x4_t vec_one_weightCurve_hNl = vmulq_f32(vec_one_weightCurve,
+                                                          vec_hNl);
+    // Branchless select on the float bit patterns: keep hNl where the lane
+    // is not "bigger", use the blended value where it is.
+    const uint32x4_t vec_if0 = vandq_u32(vmvnq_u32(bigger),
+                                         vreinterpretq_u32_f32(vec_hNl));
+    const float32x4_t vec_one_weightCurve_add =
+        vaddq_f32(vec_weightCurve_hNlFb, vec_one_weightCurve_hNl);
+    const uint32x4_t vec_if1 =
+        vandq_u32(bigger, vreinterpretq_u32_f32(vec_one_weightCurve_add));
+
+    vec_hNl = vreinterpretq_f32_u32(vorrq_u32(vec_if0, vec_if1));
+
+    {
+      const float32x4_t vec_overDriveCurve =
+          vld1q_f32(&WebRtcAec_overDriveCurve[i]);
+      const float32x4_t vec_overDriveSm_overDriveCurve =
+          vmulq_f32(vec_overDriveSm, vec_overDriveCurve);
+      vec_hNl = vpowq_f32(vec_hNl, vec_overDriveSm_overDriveCurve);
+      vst1q_f32(&hNl[i], vec_hNl);
+    }
+
+    // Suppress error signal
+    {
+      float32x4_t vec_efw_re = vld1q_f32(&efw[0][i]);
+      float32x4_t vec_efw_im = vld1q_f32(&efw[1][i]);
+      vec_efw_re = vmulq_f32(vec_efw_re, vec_hNl);
+      vec_efw_im = vmulq_f32(vec_efw_im, vec_hNl);
+
+      // Ooura fft returns incorrect sign on imaginary component. It matters
+      // here because we are making an additive change with comfort noise.
+      vec_efw_im = vmulq_f32(vec_efw_im, vec_minus_one);
+      vst1q_f32(&efw[0][i], vec_efw_re);
+      vst1q_f32(&efw[1][i], vec_efw_im);
+    }
+  }
+
+  // scalar code for the remaining items.
+  for (; i < PART_LEN1; i++) {
+    // Weight subbands
+    if (hNl[i] > hNlFb) {
+      hNl[i] = WebRtcAec_weightCurve[i] * hNlFb +
+               (1 - WebRtcAec_weightCurve[i]) * hNl[i];
+    }
+
+    hNl[i] = powf(hNl[i], aec->overDriveSm * WebRtcAec_overDriveCurve[i]);
+
+    // Suppress error signal
+    efw[0][i] *= hNl[i];
+    efw[1][i] *= hNl[i];
+
+    // Ooura fft returns incorrect sign on imaginary component. It matters
+    // here because we are making an additive change with comfort noise.
+    efw[1][i] *= -1;
+  }
+}
+
+// Installs the NEON implementations into the AEC function pointers.
+// Only the adaptation and suppression kernels have NEON variants; the
+// remaining pointers keep their generic C implementations.
+void WebRtcAec_InitAec_neon(void) {
+  WebRtcAec_FilterAdaptation = FilterAdaptationNEON;
+  WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressNEON;
+}
+
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c
index a19e8877bbc..7731b37b224 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.c
@@ -116,7 +116,7 @@ static void bitrv2_32(int* ip, float* a) {
}
}
-static void bitrv2_128(float* a) {
+static void bitrv2_128_C(float* a) {
/*
Following things have been attempted but are no faster:
(a) Storing the swap indexes in a LUT (index calculations are done
@@ -512,7 +512,7 @@ static void cftmdl_128_C(float* a) {
}
}
-static void cftfsub_128(float* a) {
+static void cftfsub_128_C(float* a) {
int j, j1, j2, j3, l;
float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
@@ -542,7 +542,7 @@ static void cftfsub_128(float* a) {
}
}
-static void cftbsub_128(float* a) {
+static void cftbsub_128_C(float* a) {
int j, j1, j2, j3, l;
float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
@@ -640,17 +640,26 @@ rft_sub_128_t cft1st_128;
rft_sub_128_t cftmdl_128;
rft_sub_128_t rftfsub_128;
rft_sub_128_t rftbsub_128;
+rft_sub_128_t cftfsub_128;
+rft_sub_128_t cftbsub_128;
+rft_sub_128_t bitrv2_128;
void aec_rdft_init(void) {
cft1st_128 = cft1st_128_C;
cftmdl_128 = cftmdl_128_C;
rftfsub_128 = rftfsub_128_C;
rftbsub_128 = rftbsub_128_C;
+ cftfsub_128 = cftfsub_128_C;
+ cftbsub_128 = cftbsub_128_C;
+ bitrv2_128 = bitrv2_128_C;
#if defined(WEBRTC_ARCH_X86_FAMILY)
if (WebRtc_GetCPUInfo(kSSE2)) {
aec_rdft_init_sse2();
}
#endif
+#if defined(MIPS_FPU_LE)
+ aec_rdft_init_mips();
+#endif
// init library constants.
makewt_32();
makect_32();
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
index 3380633ce6c..795c57d44c2 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft.h
@@ -33,13 +33,13 @@ extern float rdft_w[64];
extern float rdft_wk3ri_first[32];
extern float rdft_wk3ri_second[32];
// constants used by SSE2 but initialized in C path.
-extern float rdft_wk1r[32];
-extern float rdft_wk2r[32];
-extern float rdft_wk3r[32];
-extern float rdft_wk1i[32];
-extern float rdft_wk2i[32];
-extern float rdft_wk3i[32];
-extern float cftmdl_wk1r[4];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk1r[32];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk2r[32];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk3r[32];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk1i[32];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk2i[32];
+extern ALIGN16_BEG float ALIGN16_END rdft_wk3i[32];
+extern ALIGN16_BEG float ALIGN16_END cftmdl_wk1r[4];
// code path selection function pointers
typedef void (*rft_sub_128_t)(float* a);
@@ -47,6 +47,9 @@ extern rft_sub_128_t rftfsub_128;
extern rft_sub_128_t rftbsub_128;
extern rft_sub_128_t cft1st_128;
extern rft_sub_128_t cftmdl_128;
+extern rft_sub_128_t cftfsub_128;
+extern rft_sub_128_t cftbsub_128;
+extern rft_sub_128_t bitrv2_128;
// entry points
void aec_rdft_init(void);
@@ -54,4 +57,8 @@ void aec_rdft_init_sse2(void);
void aec_rdft_forward_128(float* a);
void aec_rdft_inverse_128(float* a);
+#if defined(MIPS_FPU_LE)
+void aec_rdft_init_mips(void);
+#endif
+
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_RDFT_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c
new file mode 100644
index 00000000000..a0dac5f135c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/aec_rdft_mips.c
@@ -0,0 +1,1213 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/aec/aec_rdft.h"
+#include "webrtc/typedefs.h"
+
+// Bit-reversal permutation of the 128-float (64 complex) FFT work array,
+// fully unrolled version of bitrv2_128_C.  Each 8-statement group below
+// swaps the complex value stored at a[x], a[x+1] with the one stored at
+// a[y], a[y+1]; values whose bit-reversed index equals their own index are
+// left untouched.
+static void bitrv2_128_mips(float *a) {
+  // n is 128
+  float xr, xi, yr, yi;
+
+  xr = a[8];
+  xi = a[9];
+  yr = a[16];
+  yi = a[17];
+  a[8] = yr;
+  a[9] = yi;
+  a[16] = xr;
+  a[17] = xi;
+
+  xr = a[64];
+  xi = a[65];
+  yr = a[2];
+  yi = a[3];
+  a[64] = yr;
+  a[65] = yi;
+  a[2] = xr;
+  a[3] = xi;
+
+  xr = a[72];
+  xi = a[73];
+  yr = a[18];
+  yi = a[19];
+  a[72] = yr;
+  a[73] = yi;
+  a[18] = xr;
+  a[19] = xi;
+
+  xr = a[80];
+  xi = a[81];
+  yr = a[10];
+  yi = a[11];
+  a[80] = yr;
+  a[81] = yi;
+  a[10] = xr;
+  a[11] = xi;
+
+  xr = a[88];
+  xi = a[89];
+  yr = a[26];
+  yi = a[27];
+  a[88] = yr;
+  a[89] = yi;
+  a[26] = xr;
+  a[27] = xi;
+
+  xr = a[74];
+  xi = a[75];
+  yr = a[82];
+  yi = a[83];
+  a[74] = yr;
+  a[75] = yi;
+  a[82] = xr;
+  a[83] = xi;
+
+  xr = a[32];
+  xi = a[33];
+  yr = a[4];
+  yi = a[5];
+  a[32] = yr;
+  a[33] = yi;
+  a[4] = xr;
+  a[5] = xi;
+
+  xr = a[40];
+  xi = a[41];
+  yr = a[20];
+  yi = a[21];
+  a[40] = yr;
+  a[41] = yi;
+  a[20] = xr;
+  a[21] = xi;
+
+  xr = a[48];
+  xi = a[49];
+  yr = a[12];
+  yi = a[13];
+  a[48] = yr;
+  a[49] = yi;
+  a[12] = xr;
+  a[13] = xi;
+
+  xr = a[56];
+  xi = a[57];
+  yr = a[28];
+  yi = a[29];
+  a[56] = yr;
+  a[57] = yi;
+  a[28] = xr;
+  a[29] = xi;
+
+  xr = a[34];
+  xi = a[35];
+  yr = a[68];
+  yi = a[69];
+  a[34] = yr;
+  a[35] = yi;
+  a[68] = xr;
+  a[69] = xi;
+
+  xr = a[42];
+  xi = a[43];
+  yr = a[84];
+  yi = a[85];
+  a[42] = yr;
+  a[43] = yi;
+  a[84] = xr;
+  a[85] = xi;
+
+  xr = a[50];
+  xi = a[51];
+  yr = a[76];
+  yi = a[77];
+  a[50] = yr;
+  a[51] = yi;
+  a[76] = xr;
+  a[77] = xi;
+
+  xr = a[58];
+  xi = a[59];
+  yr = a[92];
+  yi = a[93];
+  a[58] = yr;
+  a[59] = yi;
+  a[92] = xr;
+  a[93] = xi;
+
+  xr = a[44];
+  xi = a[45];
+  yr = a[52];
+  yi = a[53];
+  a[44] = yr;
+  a[45] = yi;
+  a[52] = xr;
+  a[53] = xi;
+
+  xr = a[96];
+  xi = a[97];
+  yr = a[6];
+  yi = a[7];
+  a[96] = yr;
+  a[97] = yi;
+  a[6] = xr;
+  a[7] = xi;
+
+  xr = a[104];
+  xi = a[105];
+  yr = a[22];
+  yi = a[23];
+  a[104] = yr;
+  a[105] = yi;
+  a[22] = xr;
+  a[23] = xi;
+
+  xr = a[112];
+  xi = a[113];
+  yr = a[14];
+  yi = a[15];
+  a[112] = yr;
+  a[113] = yi;
+  a[14] = xr;
+  a[15] = xi;
+
+  xr = a[120];
+  xi = a[121];
+  yr = a[30];
+  yi = a[31];
+  a[120] = yr;
+  a[121] = yi;
+  a[30] = xr;
+  a[31] = xi;
+
+  xr = a[98];
+  xi = a[99];
+  yr = a[70];
+  yi = a[71];
+  a[98] = yr;
+  a[99] = yi;
+  a[70] = xr;
+  a[71] = xi;
+
+  xr = a[106];
+  xi = a[107];
+  yr = a[86];
+  yi = a[87];
+  a[106] = yr;
+  a[107] = yi;
+  a[86] = xr;
+  a[87] = xi;
+
+  xr = a[114];
+  xi = a[115];
+  yr = a[78];
+  yi = a[79];
+  a[114] = yr;
+  a[115] = yi;
+  a[78] = xr;
+  a[79] = xi;
+
+  xr = a[122];
+  xi = a[123];
+  yr = a[94];
+  yi = a[95];
+  a[122] = yr;
+  a[123] = yi;
+  a[94] = xr;
+  a[95] = xi;
+
+  xr = a[100];
+  xi = a[101];
+  yr = a[38];
+  yi = a[39];
+  a[100] = yr;
+  a[101] = yi;
+  a[38] = xr;
+  a[39] = xi;
+
+  xr = a[108];
+  xi = a[109];
+  yr = a[54];
+  yi = a[55];
+  a[108] = yr;
+  a[109] = yi;
+  a[54] = xr;
+  a[55] = xi;
+
+  xr = a[116];
+  xi = a[117];
+  yr = a[46];
+  yi = a[47];
+  a[116] = yr;
+  a[117] = yi;
+  a[46] = xr;
+  a[47] = xi;
+
+  xr = a[124];
+  xi = a[125];
+  yr = a[62];
+  yi = a[63];
+  a[124] = yr;
+  a[125] = yi;
+  a[62] = xr;
+  a[63] = xi;
+
+  xr = a[110];
+  xi = a[111];
+  yr = a[118];
+  yi = a[119];
+  a[110] = yr;
+  a[111] = yi;
+  a[118] = xr;
+  a[119] = xi;
+}
+
+// First radix-4 butterfly stage of the 128-point complex FFT, MIPS asm
+// version of cft1st_128_C.  The first two 8-float groups (trivial twiddle
+// factors) are handled before the loop; the remaining 7 iterations apply
+// twiddle factors read from rdft_w, rdft_wk3ri_first and rdft_wk3ri_second.
+// NOTE(review): %[first] and %[second] are incremented inside the asm but
+// are declared as input-only operands; GCC requires modified operands to be
+// outputs ("+r") — confirm this is benign with the toolchains in use.
+static void cft1st_128_mips(float *a) {
+  float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;
+  float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+
+  float f0, f1, f2, f3, f4, f5, f6, f7;
+  // Pointer values are carried in integer registers by the asm block.
+  int a_ptr, p1_rdft, p2_rdft, count;
+
+  __asm __volatile (
+    ".set push \n\t"
+    ".set noreorder \n\t"
+    // first 16
+    "lwc1 %[f0], 0(%[a]) \n\t"
+    "lwc1 %[f1], 4(%[a]) \n\t"
+    "lwc1 %[f2], 8(%[a]) \n\t"
+    "lwc1 %[f3], 12(%[a]) \n\t"
+    "lwc1 %[f4], 16(%[a]) \n\t"
+    "lwc1 %[f5], 20(%[a]) \n\t"
+    "lwc1 %[f6], 24(%[a]) \n\t"
+    "lwc1 %[f7], 28(%[a]) \n\t"
+    "add.s %[x0r], %[f0], %[f2] \n\t"
+    "add.s %[x0i], %[f1], %[f3] \n\t"
+    "sub.s %[x1r], %[f0], %[f2] \n\t"
+    "add.s %[x2r], %[f4], %[f6] \n\t"
+    "add.s %[x2i], %[f5], %[f7] \n\t"
+    "sub.s %[x1i], %[f1], %[f3] \n\t"
+    "sub.s %[x3r], %[f4], %[f6] \n\t"
+    "sub.s %[x3i], %[f5], %[f7] \n\t"
+    "add.s %[f0], %[x0r], %[x2r] \n\t"
+    "add.s %[f1], %[x0i], %[x2i] \n\t"
+    "sub.s %[f4], %[x0r], %[x2r] \n\t"
+    "sub.s %[f5], %[x0i], %[x2i] \n\t"
+    "sub.s %[f2], %[x1r], %[x3i] \n\t"
+    "add.s %[f3], %[x1i], %[x3r] \n\t"
+    "add.s %[f6], %[x1r], %[x3i] \n\t"
+    "sub.s %[f7], %[x1i], %[x3r] \n\t"
+    "swc1 %[f0], 0(%[a]) \n\t"
+    "swc1 %[f1], 4(%[a]) \n\t"
+    "swc1 %[f2], 8(%[a]) \n\t"
+    "swc1 %[f3], 12(%[a]) \n\t"
+    "swc1 %[f4], 16(%[a]) \n\t"
+    "swc1 %[f5], 20(%[a]) \n\t"
+    "swc1 %[f6], 24(%[a]) \n\t"
+    "swc1 %[f7], 28(%[a]) \n\t"
+    // second 16
+    "lwc1 %[f0], 32(%[a]) \n\t"
+    "lwc1 %[f1], 36(%[a]) \n\t"
+    "lwc1 %[f2], 40(%[a]) \n\t"
+    "lwc1 %[f3], 44(%[a]) \n\t"
+    "lwc1 %[f4], 48(%[a]) \n\t"
+    "lwc1 %[f5], 52(%[a]) \n\t"
+    "lwc1 %[f6], 56(%[a]) \n\t"
+    "lwc1 %[f7], 60(%[a]) \n\t"
+    "add.s %[x0r], %[f0], %[f2] \n\t"
+    "add.s %[x0i], %[f1], %[f3] \n\t"
+    "sub.s %[x1r], %[f0], %[f2] \n\t"
+    "sub.s %[x1i], %[f1], %[f3] \n\t"
+    "sub.s %[x3r], %[f4], %[f6] \n\t"
+    "sub.s %[x3i], %[f5], %[f7] \n\t"
+    "add.s %[x2r], %[f4], %[f6] \n\t"
+    "add.s %[x2i], %[f5], %[f7] \n\t"
+    "lwc1 %[wk2r], 8(%[rdft_w]) \n\t"
+    "add.s %[f3], %[x1i], %[x3r] \n\t"
+    "sub.s %[f2], %[x1r], %[x3i] \n\t"
+    "add.s %[f6], %[x3i], %[x1r] \n\t"
+    "sub.s %[f7], %[x3r], %[x1i] \n\t"
+    "add.s %[f0], %[x0r], %[x2r] \n\t"
+    "add.s %[f1], %[x0i], %[x2i] \n\t"
+    "sub.s %[x1r], %[f2], %[f3] \n\t"
+    "add.s %[x1i], %[f3], %[f2] \n\t"
+    "sub.s %[x3r], %[f7], %[f6] \n\t"
+    "add.s %[x3i], %[f7], %[f6] \n\t"
+    "sub.s %[f4], %[x0r], %[x2r] \n\t"
+    "mul.s %[f2], %[wk2r], %[x1r] \n\t"
+    "mul.s %[f3], %[wk2r], %[x1i] \n\t"
+    "mul.s %[f6], %[wk2r], %[x3r] \n\t"
+    "mul.s %[f7], %[wk2r], %[x3i] \n\t"
+    "sub.s %[f5], %[x2i], %[x0i] \n\t"
+    "swc1 %[f0], 32(%[a]) \n\t"
+    "swc1 %[f1], 36(%[a]) \n\t"
+    "swc1 %[f2], 40(%[a]) \n\t"
+    "swc1 %[f3], 44(%[a]) \n\t"
+    "swc1 %[f5], 48(%[a]) \n\t"
+    "swc1 %[f4], 52(%[a]) \n\t"
+    "swc1 %[f6], 56(%[a]) \n\t"
+    "swc1 %[f7], 60(%[a]) \n\t"
+    // prepare for loop
+    "addiu %[a_ptr], %[a], 64 \n\t"
+    "addiu %[p1_rdft], %[rdft_w], 8 \n\t"
+    "addiu %[p2_rdft], %[rdft_w], 16 \n\t"
+    "addiu %[count], $zero, 7 \n\t"
+    // loop: two 8-float groups per iteration, with twiddle factors.
+    "1: \n\t"
+    "lwc1 %[f0], 0(%[a_ptr]) \n\t"
+    "lwc1 %[f1], 4(%[a_ptr]) \n\t"
+    "lwc1 %[f2], 8(%[a_ptr]) \n\t"
+    "lwc1 %[f3], 12(%[a_ptr]) \n\t"
+    "lwc1 %[f4], 16(%[a_ptr]) \n\t"
+    "lwc1 %[f5], 20(%[a_ptr]) \n\t"
+    "lwc1 %[f6], 24(%[a_ptr]) \n\t"
+    "lwc1 %[f7], 28(%[a_ptr]) \n\t"
+    "add.s %[x0r], %[f0], %[f2] \n\t"
+    "add.s %[x2r], %[f4], %[f6] \n\t"
+    "add.s %[x0i], %[f1], %[f3] \n\t"
+    "add.s %[x2i], %[f5], %[f7] \n\t"
+    "sub.s %[x1r], %[f0], %[f2] \n\t"
+    "sub.s %[x1i], %[f1], %[f3] \n\t"
+    "sub.s %[x3r], %[f4], %[f6] \n\t"
+    "sub.s %[x3i], %[f5], %[f7] \n\t"
+    "lwc1 %[wk2i], 4(%[p1_rdft]) \n\t"
+    "sub.s %[f0], %[x0r], %[x2r] \n\t"
+    "sub.s %[f1], %[x0i], %[x2i] \n\t"
+    "add.s %[f2], %[x1i], %[x3r] \n\t"
+    "sub.s %[f3], %[x1r], %[x3i] \n\t"
+    "lwc1 %[wk1r], 0(%[p2_rdft]) \n\t"
+    "add.s %[f4], %[x1r], %[x3i] \n\t"
+    "sub.s %[f5], %[x1i], %[x3r] \n\t"
+    "lwc1 %[wk3r], 8(%[first]) \n\t"
+    "mul.s %[x3r], %[wk2r], %[f0] \n\t"
+    "mul.s %[x3i], %[wk2r], %[f1] \n\t"
+    "mul.s %[x1r], %[wk1r], %[f3] \n\t"
+    "mul.s %[x1i], %[wk1r], %[f2] \n\t"
+    "lwc1 %[wk1i], 4(%[p2_rdft]) \n\t"
+    "mul.s %[f6], %[wk3r], %[f4] \n\t"
+    "mul.s %[f7], %[wk3r], %[f5] \n\t"
+    "lwc1 %[wk3i], 12(%[first]) \n\t"
+#if !defined(MIPS32_R2_LE)
+    "mul.s %[wk1r], %[wk2i], %[f1] \n\t"
+    "mul.s %[f0], %[wk2i], %[f0] \n\t"
+    "sub.s %[x3r], %[x3r], %[wk1r] \n\t"
+    "add.s %[x3i], %[x3i], %[f0] \n\t"
+    "add.s %[f0], %[x0r], %[x2r] \n\t"
+    "add.s %[f1], %[x0i], %[x2i] \n\t"
+    "mul.s %[x0r], %[wk1i], %[f2] \n\t"
+    "mul.s %[f3], %[wk1i], %[f3] \n\t"
+    "mul.s %[x2r], %[wk3i], %[f5] \n\t"
+    "mul.s %[f4], %[wk3i], %[f4] \n\t"
+    "sub.s %[x1r], %[x1r], %[x0r] \n\t"
+    "add.s %[x1i], %[x1i], %[f3] \n\t"
+    "sub.s %[f6], %[f6], %[x2r] \n\t"
+    "add.s %[f7], %[f7], %[f4] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+    // MIPS32R2: fold the twiddle multiply-accumulates into fused ops.
+    "nmsub.s %[x3r], %[x3r], %[wk2i], %[f1] \n\t"
+    "madd.s %[x3i], %[x3i], %[wk2i], %[f0] \n\t"
+    "add.s %[f0], %[x0r], %[x2r] \n\t"
+    "add.s %[f1], %[x0i], %[x2i] \n\t"
+    "nmsub.s %[x1r], %[x1r], %[wk1i], %[f2] \n\t"
+    "madd.s %[x1i], %[x1i], %[wk1i], %[f3] \n\t"
+    "nmsub.s %[f6], %[f6], %[wk3i], %[f5] \n\t"
+    "madd.s %[f7], %[f7], %[wk3i], %[f4] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+    "swc1 %[f0], 0(%[a_ptr]) \n\t"
+    "swc1 %[f1], 4(%[a_ptr]) \n\t"
+    "swc1 %[x1r], 8(%[a_ptr]) \n\t"
+    "swc1 %[x1i], 12(%[a_ptr]) \n\t"
+    "swc1 %[x3r], 16(%[a_ptr]) \n\t"
+    "swc1 %[x3i], 20(%[a_ptr]) \n\t"
+    "swc1 %[f6], 24(%[a_ptr]) \n\t"
+    "swc1 %[f7], 28(%[a_ptr]) \n\t"
+    "lwc1 %[f0], 32(%[a_ptr]) \n\t"
+    "lwc1 %[f1], 36(%[a_ptr]) \n\t"
+    "lwc1 %[f2], 40(%[a_ptr]) \n\t"
+    "lwc1 %[f3], 44(%[a_ptr]) \n\t"
+    "lwc1 %[f4], 48(%[a_ptr]) \n\t"
+    "lwc1 %[f5], 52(%[a_ptr]) \n\t"
+    "lwc1 %[f6], 56(%[a_ptr]) \n\t"
+    "lwc1 %[f7], 60(%[a_ptr]) \n\t"
+    "add.s %[x0r], %[f0], %[f2] \n\t"
+    "add.s %[x2r], %[f4], %[f6] \n\t"
+    "add.s %[x0i], %[f1], %[f3] \n\t"
+    "add.s %[x2i], %[f5], %[f7] \n\t"
+    "sub.s %[x1r], %[f0], %[f2] \n\t"
+    "sub.s %[x1i], %[f1], %[f3] \n\t"
+    "sub.s %[x3r], %[f4], %[f6] \n\t"
+    "sub.s %[x3i], %[f5], %[f7] \n\t"
+    "lwc1 %[wk1r], 8(%[p2_rdft]) \n\t"
+    "sub.s %[f0], %[x0r], %[x2r] \n\t"
+    "sub.s %[f1], %[x0i], %[x2i] \n\t"
+    "add.s %[f2], %[x1i], %[x3r] \n\t"
+    "sub.s %[f3], %[x1r], %[x3i] \n\t"
+    "add.s %[f4], %[x1r], %[x3i] \n\t"
+    "sub.s %[f5], %[x1i], %[x3r] \n\t"
+    "lwc1 %[wk3r], 8(%[second]) \n\t"
+    "mul.s %[x3r], %[wk2i], %[f0] \n\t"
+    "mul.s %[x3i], %[wk2i], %[f1] \n\t"
+    "mul.s %[x1r], %[wk1r], %[f3] \n\t"
+    "mul.s %[x1i], %[wk1r], %[f2] \n\t"
+    "mul.s %[f6], %[wk3r], %[f4] \n\t"
+    "mul.s %[f7], %[wk3r], %[f5] \n\t"
+    "lwc1 %[wk1i], 12(%[p2_rdft]) \n\t"
+    "lwc1 %[wk3i], 12(%[second]) \n\t"
+#if !defined(MIPS32_R2_LE)
+    "mul.s %[wk1r], %[wk2r], %[f1] \n\t"
+    "mul.s %[f0], %[wk2r], %[f0] \n\t"
+    "add.s %[x3r], %[x3r], %[wk1r] \n\t"
+    "neg.s %[x3r], %[x3r] \n\t"
+    "sub.s %[x3i], %[f0], %[x3i] \n\t"
+    "add.s %[f0], %[x0r], %[x2r] \n\t"
+    "add.s %[f1], %[x0i], %[x2i] \n\t"
+    "mul.s %[x0r], %[wk1i], %[f2] \n\t"
+    "mul.s %[f3], %[wk1i], %[f3] \n\t"
+    "mul.s %[x2r], %[wk3i], %[f5] \n\t"
+    "mul.s %[f4], %[wk3i], %[f4] \n\t"
+    "sub.s %[x1r], %[x1r], %[x0r] \n\t"
+    "add.s %[x1i], %[x1i], %[f3] \n\t"
+    "sub.s %[f6], %[f6], %[x2r] \n\t"
+    "add.s %[f7], %[f7], %[f4] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+    "nmadd.s %[x3r], %[x3r], %[wk2r], %[f1] \n\t"
+    "msub.s %[x3i], %[x3i], %[wk2r], %[f0] \n\t"
+    "add.s %[f0], %[x0r], %[x2r] \n\t"
+    "add.s %[f1], %[x0i], %[x2i] \n\t"
+    "nmsub.s %[x1r], %[x1r], %[wk1i], %[f2] \n\t"
+    "madd.s %[x1i], %[x1i], %[wk1i], %[f3] \n\t"
+    "nmsub.s %[f6], %[f6], %[wk3i], %[f5] \n\t"
+    "madd.s %[f7], %[f7], %[wk3i], %[f4] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+    // Advance all table/array pointers; stores use negative offsets since
+    // a_ptr has already been bumped to the next group.
+    "addiu %[count], %[count], -1 \n\t"
+    "lwc1 %[wk2r], 8(%[p1_rdft]) \n\t"
+    "addiu %[a_ptr], %[a_ptr], 64 \n\t"
+    "addiu %[p1_rdft], %[p1_rdft], 8 \n\t"
+    "addiu %[p2_rdft], %[p2_rdft], 16 \n\t"
+    "addiu %[first], %[first], 8 \n\t"
+    "swc1 %[f0], -32(%[a_ptr]) \n\t"
+    "swc1 %[f1], -28(%[a_ptr]) \n\t"
+    "swc1 %[x1r], -24(%[a_ptr]) \n\t"
+    "swc1 %[x1i], -20(%[a_ptr]) \n\t"
+    "swc1 %[x3r], -16(%[a_ptr]) \n\t"
+    "swc1 %[x3i], -12(%[a_ptr]) \n\t"
+    "swc1 %[f6], -8(%[a_ptr]) \n\t"
+    "swc1 %[f7], -4(%[a_ptr]) \n\t"
+    "bgtz %[count], 1b \n\t"
+    " addiu %[second], %[second], 8 \n\t"
+    ".set pop \n\t"
+    : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+      [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+      [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+      [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+      [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [wk1r] "=&f" (wk1r),
+      [wk1i] "=&f" (wk1i), [wk2r] "=&f" (wk2r), [wk2i] "=&f" (wk2i),
+      [wk3r] "=&f" (wk3r), [wk3i] "=&f" (wk3i), [a_ptr] "=&r" (a_ptr),
+      [p1_rdft] "=&r" (p1_rdft), [p2_rdft] "=&r" (p2_rdft),
+      [count] "=&r" (count)
+    : [a] "r" (a), [rdft_w] "r" (rdft_w), [first] "r" (rdft_wk3ri_first),
+      [second] "r" (rdft_wk3ri_second)
+    : "memory"
+  );
+}
+
+static void cftmdl_128_mips(float *a) {
+ float wk1r, wk1i, wk2r, wk2i, wk3r, wk3i;
+ float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+ float f0, f1, f2, f3, f4, f5, f6, f7;
+ int tmp_a, count;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 0 \n\t"
+ "addiu %[count], $zero, 4 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 32(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 36(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 64(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 68(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 96(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 100(%[tmp_a]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "sub.s %[f4], %[x0r], %[x2r] \n\t"
+ "sub.s %[f5], %[x0i], %[x2i] \n\t"
+ "sub.s %[f2], %[x1r], %[x3i] \n\t"
+ "add.s %[f3], %[x1i], %[x3r] \n\t"
+ "add.s %[f6], %[x1r], %[x3i] \n\t"
+ "sub.s %[f7], %[x1i], %[x3r] \n\t"
+ "swc1 %[f0], 0(%[tmp_a]) \n\t"
+ "swc1 %[f1], 4(%[tmp_a]) \n\t"
+ "swc1 %[f2], 32(%[tmp_a]) \n\t"
+ "swc1 %[f3], 36(%[tmp_a]) \n\t"
+ "swc1 %[f4], 64(%[tmp_a]) \n\t"
+ "swc1 %[f5], 68(%[tmp_a]) \n\t"
+ "swc1 %[f6], 96(%[tmp_a]) \n\t"
+ "swc1 %[f7], 100(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+ [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+ [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [tmp_a] "=&r" (tmp_a),
+ [count] "=&r" (count)
+ : [a] "r" (a)
+ : "memory"
+ );
+ wk2r = rdft_w[2];
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 128 \n\t"
+ "addiu %[count], $zero, 4 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 32(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 36(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 64(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 68(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 96(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 100(%[tmp_a]) \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[f0], %[x1r], %[x3i] \n\t"
+ "add.s %[f1], %[x1i], %[x3r] \n\t"
+ "sub.s %[f2], %[x3r], %[x1i] \n\t"
+ "add.s %[f3], %[x3i], %[x1r] \n\t"
+ "add.s %[f4], %[x0r], %[x2r] \n\t"
+ "add.s %[f5], %[x0i], %[x2i] \n\t"
+ "sub.s %[f6], %[f0], %[f1] \n\t"
+ "add.s %[f0], %[f0], %[f1] \n\t"
+ "sub.s %[f7], %[f2], %[f3] \n\t"
+ "add.s %[f2], %[f2], %[f3] \n\t"
+ "sub.s %[f1], %[x2i], %[x0i] \n\t"
+ "mul.s %[f6], %[f6], %[wk2r] \n\t"
+ "mul.s %[f0], %[f0], %[wk2r] \n\t"
+ "sub.s %[f3], %[x0r], %[x2r] \n\t"
+ "mul.s %[f7], %[f7], %[wk2r] \n\t"
+ "mul.s %[f2], %[f2], %[wk2r] \n\t"
+ "swc1 %[f4], 0(%[tmp_a]) \n\t"
+ "swc1 %[f5], 4(%[tmp_a]) \n\t"
+ "swc1 %[f6], 32(%[tmp_a]) \n\t"
+ "swc1 %[f0], 36(%[tmp_a]) \n\t"
+ "swc1 %[f1], 64(%[tmp_a]) \n\t"
+ "swc1 %[f3], 68(%[tmp_a]) \n\t"
+ "swc1 %[f7], 96(%[tmp_a]) \n\t"
+ "swc1 %[f2], 100(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+ [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+ [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [tmp_a] "=&r" (tmp_a),
+ [count] "=&r" (count)
+ : [a] "r" (a), [wk2r] "f" (wk2r)
+ : "memory"
+ );
+ wk2i = rdft_w[3];
+ wk1r = rdft_w[4];
+ wk1i = rdft_w[5];
+ wk3r = rdft_wk3ri_first[2];
+ wk3i = rdft_wk3ri_first[3];
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 256 \n\t"
+ "addiu %[count], $zero, 4 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 32(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 36(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 64(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 68(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 96(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 100(%[tmp_a]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "sub.s %[f0], %[x0r], %[x2r] \n\t"
+ "sub.s %[f1], %[x0i], %[x2i] \n\t"
+ "add.s %[f2], %[x1i], %[x3r] \n\t"
+ "sub.s %[f3], %[x1r], %[x3i] \n\t"
+ "add.s %[f4], %[x1r], %[x3i] \n\t"
+ "sub.s %[f5], %[x1i], %[x3r] \n\t"
+ "mul.s %[x3r], %[wk2r], %[f0] \n\t"
+ "mul.s %[x3i], %[wk2r], %[f1] \n\t"
+ "mul.s %[x1r], %[wk1r], %[f3] \n\t"
+ "mul.s %[x1i], %[wk1r], %[f2] \n\t"
+ "mul.s %[f6], %[wk3r], %[f4] \n\t"
+ "mul.s %[f7], %[wk3r], %[f5] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f1], %[wk2i], %[f1] \n\t"
+ "mul.s %[f0], %[wk2i], %[f0] \n\t"
+ "sub.s %[x3r], %[x3r], %[f1] \n\t"
+ "add.s %[x3i], %[x3i], %[f0] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "mul.s %[f2], %[wk1i], %[f2] \n\t"
+ "mul.s %[f3], %[wk1i], %[f3] \n\t"
+ "mul.s %[f5], %[wk3i], %[f5] \n\t"
+ "mul.s %[f4], %[wk3i], %[f4] \n\t"
+ "sub.s %[x1r], %[x1r], %[f2] \n\t"
+ "add.s %[x1i], %[x1i], %[f3] \n\t"
+ "sub.s %[f6], %[f6], %[f5] \n\t"
+ "add.s %[f7], %[f7], %[f4] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmsub.s %[x3r], %[x3r], %[wk2i], %[f1] \n\t"
+ "madd.s %[x3i], %[x3i], %[wk2i], %[f0] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "nmsub.s %[x1r], %[x1r], %[wk1i], %[f2] \n\t"
+ "madd.s %[x1i], %[x1i], %[wk1i], %[f3] \n\t"
+ "nmsub.s %[f6], %[f6], %[wk3i], %[f5] \n\t"
+ "madd.s %[f7], %[f7], %[wk3i], %[f4] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[f0], 0(%[tmp_a]) \n\t"
+ "swc1 %[f1], 4(%[tmp_a]) \n\t"
+ "swc1 %[x1r], 32(%[tmp_a]) \n\t"
+ "swc1 %[x1i], 36(%[tmp_a]) \n\t"
+ "swc1 %[x3r], 64(%[tmp_a]) \n\t"
+ "swc1 %[x3i], 68(%[tmp_a]) \n\t"
+ "swc1 %[f6], 96(%[tmp_a]) \n\t"
+ "swc1 %[f7], 100(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+ [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+ [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [tmp_a] "=&r" (tmp_a),
+ [count] "=&r" (count)
+ : [a] "r" (a), [wk1r] "f" (wk1r), [wk1i] "f" (wk1i), [wk2r] "f" (wk2r),
+ [wk2i] "f" (wk2i), [wk3r] "f" (wk3r), [wk3i] "f" (wk3i)
+ : "memory"
+ );
+
+ wk1r = rdft_w[6];
+ wk1i = rdft_w[7];
+ wk3r = rdft_wk3ri_second[2];
+ wk3i = rdft_wk3ri_second[3];
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 384 \n\t"
+ "addiu %[count], $zero, 4 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 32(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 36(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 64(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 68(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 96(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 100(%[tmp_a]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "sub.s %[f0], %[x0r], %[x2r] \n\t"
+ "sub.s %[f1], %[x0i], %[x2i] \n\t"
+ "add.s %[f2], %[x1i], %[x3r] \n\t"
+ "sub.s %[f3], %[x1r], %[x3i] \n\t"
+ "add.s %[f4], %[x1r], %[x3i] \n\t"
+ "sub.s %[f5], %[x1i], %[x3r] \n\t"
+ "mul.s %[x3r], %[wk2i], %[f0] \n\t"
+ "mul.s %[x3i], %[wk2i], %[f1] \n\t"
+ "mul.s %[x1r], %[wk1r], %[f3] \n\t"
+ "mul.s %[x1i], %[wk1r], %[f2] \n\t"
+ "mul.s %[f6], %[wk3r], %[f4] \n\t"
+ "mul.s %[f7], %[wk3r], %[f5] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[f1], %[wk2r], %[f1] \n\t"
+ "mul.s %[f0], %[wk2r], %[f0] \n\t"
+ "add.s %[x3r], %[x3r], %[f1] \n\t"
+ "neg.s %[x3r], %[x3r] \n\t"
+ "sub.s %[x3i], %[f0], %[x3i] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "mul.s %[f2], %[wk1i], %[f2] \n\t"
+ "mul.s %[f3], %[wk1i], %[f3] \n\t"
+ "mul.s %[f5], %[wk3i], %[f5] \n\t"
+ "mul.s %[f4], %[wk3i], %[f4] \n\t"
+ "sub.s %[x1r], %[x1r], %[f2] \n\t"
+ "add.s %[x1i], %[x1i], %[f3] \n\t"
+ "sub.s %[f6], %[f6], %[f5] \n\t"
+ "add.s %[f7], %[f7], %[f4] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmadd.s %[x3r], %[x3r], %[wk2r], %[f1] \n\t"
+ "msub.s %[x3i], %[x3i], %[wk2r], %[f0] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "nmsub.s %[x1r], %[x1r], %[wk1i], %[f2] \n\t"
+ "madd.s %[x1i], %[x1i], %[wk1i], %[f3] \n\t"
+ "nmsub.s %[f6], %[f6], %[wk3i], %[f5] \n\t"
+ "madd.s %[f7], %[f7], %[wk3i], %[f4] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "swc1 %[f0], 0(%[tmp_a]) \n\t"
+ "swc1 %[f1], 4(%[tmp_a]) \n\t"
+ "swc1 %[x1r], 32(%[tmp_a]) \n\t"
+ "swc1 %[x1i], 36(%[tmp_a]) \n\t"
+ "swc1 %[x3r], 64(%[tmp_a]) \n\t"
+ "swc1 %[x3i], 68(%[tmp_a]) \n\t"
+ "swc1 %[f6], 96(%[tmp_a]) \n\t"
+ "swc1 %[f7], 100(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+ [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+ [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [tmp_a] "=&r" (tmp_a),
+ [count] "=&r" (count)
+ : [a] "r" (a), [wk1r] "f" (wk1r), [wk1i] "f" (wk1i), [wk2r] "f" (wk2r),
+ [wk2i] "f" (wk2i), [wk3r] "f" (wk3r), [wk3i] "f" (wk3i)
+ : "memory"
+ );
+}
+
+static void cftfsub_128_mips(float *a) {
+ float x0r, x0i, x1r, x1i, x2r, x2i, x3r, x3i;
+ float f0, f1, f2, f3, f4, f5, f6, f7;
+ int tmp_a, count;
+
+ cft1st_128(a);
+ cftmdl_128(a);
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 0 \n\t"
+ "addiu %[count], $zero, 16 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 128(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 132(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 256(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 260(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 384(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 388(%[tmp_a]) \n\t"
+ "add.s %[x0r], %[f0], %[f2] \n\t"
+ "add.s %[x0i], %[f1], %[f3] \n\t"
+ "add.s %[x2r], %[f4], %[f6] \n\t"
+ "add.s %[x2i], %[f5], %[f7] \n\t"
+ "sub.s %[x1r], %[f0], %[f2] \n\t"
+ "sub.s %[x1i], %[f1], %[f3] \n\t"
+ "sub.s %[x3r], %[f4], %[f6] \n\t"
+ "sub.s %[x3i], %[f5], %[f7] \n\t"
+ "add.s %[f0], %[x0r], %[x2r] \n\t"
+ "add.s %[f1], %[x0i], %[x2i] \n\t"
+ "sub.s %[f4], %[x0r], %[x2r] \n\t"
+ "sub.s %[f5], %[x0i], %[x2i] \n\t"
+ "sub.s %[f2], %[x1r], %[x3i] \n\t"
+ "add.s %[f3], %[x1i], %[x3r] \n\t"
+ "add.s %[f6], %[x1r], %[x3i] \n\t"
+ "sub.s %[f7], %[x1i], %[x3r] \n\t"
+ "swc1 %[f0], 0(%[tmp_a]) \n\t"
+ "swc1 %[f1], 4(%[tmp_a]) \n\t"
+ "swc1 %[f2], 128(%[tmp_a]) \n\t"
+ "swc1 %[f3], 132(%[tmp_a]) \n\t"
+ "swc1 %[f4], 256(%[tmp_a]) \n\t"
+ "swc1 %[f5], 260(%[tmp_a]) \n\t"
+ "swc1 %[f6], 384(%[tmp_a]) \n\t"
+ "swc1 %[f7], 388(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 8 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [x0r] "=&f" (x0r), [x0i] "=&f" (x0i), [x1r] "=&f" (x1r),
+ [x1i] "=&f" (x1i), [x2r] "=&f" (x2r), [x2i] "=&f" (x2i),
+ [x3r] "=&f" (x3r), [x3i] "=&f" (x3i), [tmp_a] "=&r" (tmp_a),
+ [count] "=&r" (count)
+ : [a] "r" (a)
+ : "memory"
+ );
+}
+
+static void cftbsub_128_mips(float *a) {
+ float f0, f1, f2, f3, f4, f5, f6, f7;
+ float f8, f9, f10, f11, f12, f13, f14, f15;
+ float f16, f17, f18, f19, f20, f21, f22, f23;
+ int tmp_a, count;
+
+ cft1st_128(a);
+ cftmdl_128(a);
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "addiu %[tmp_a], %[a], 0 \n\t"
+ "addiu %[count], $zero, 8 \n\t"
+ "1: \n\t"
+ "addiu %[count], %[count], -1 \n\t"
+ "lwc1 %[f0], 0(%[tmp_a]) \n\t"
+ "lwc1 %[f1], 4(%[tmp_a]) \n\t"
+ "lwc1 %[f2], 128(%[tmp_a]) \n\t"
+ "lwc1 %[f3], 132(%[tmp_a]) \n\t"
+ "lwc1 %[f4], 256(%[tmp_a]) \n\t"
+ "lwc1 %[f5], 260(%[tmp_a]) \n\t"
+ "lwc1 %[f6], 384(%[tmp_a]) \n\t"
+ "lwc1 %[f7], 388(%[tmp_a]) \n\t"
+ "lwc1 %[f8], 8(%[tmp_a]) \n\t"
+ "lwc1 %[f9], 12(%[tmp_a]) \n\t"
+ "lwc1 %[f10], 136(%[tmp_a]) \n\t"
+ "lwc1 %[f11], 140(%[tmp_a]) \n\t"
+ "lwc1 %[f12], 264(%[tmp_a]) \n\t"
+ "lwc1 %[f13], 268(%[tmp_a]) \n\t"
+ "lwc1 %[f14], 392(%[tmp_a]) \n\t"
+ "lwc1 %[f15], 396(%[tmp_a]) \n\t"
+ "add.s %[f16], %[f0], %[f2] \n\t"
+ "add.s %[f17], %[f1], %[f3] \n\t"
+ "add.s %[f18], %[f4], %[f6] \n\t"
+ "add.s %[f19], %[f5], %[f7] \n\t"
+ "sub.s %[f20], %[f0], %[f2] \n\t"
+ "sub.s %[f21], %[f3], %[f1] \n\t"
+ "sub.s %[f22], %[f4], %[f6] \n\t"
+ "sub.s %[f23], %[f5], %[f7] \n\t"
+ "add.s %[f0], %[f8], %[f10] \n\t"
+ "add.s %[f1], %[f9], %[f11] \n\t"
+ "add.s %[f2], %[f12], %[f14] \n\t"
+ "add.s %[f3], %[f13], %[f15] \n\t"
+ "sub.s %[f4], %[f8], %[f10] \n\t"
+ "sub.s %[f5], %[f11], %[f9] \n\t"
+ "sub.s %[f6], %[f12], %[f14] \n\t"
+ "sub.s %[f7], %[f13], %[f15] \n\t"
+ "add.s %[f8], %[f16], %[f18] \n\t"
+ "add.s %[f9], %[f17], %[f19] \n\t"
+ "sub.s %[f12], %[f16], %[f18] \n\t"
+ "sub.s %[f13], %[f19], %[f17] \n\t"
+ "sub.s %[f10], %[f20], %[f23] \n\t"
+ "sub.s %[f11], %[f21], %[f22] \n\t"
+ "add.s %[f14], %[f20], %[f23] \n\t"
+ "add.s %[f15], %[f21], %[f22] \n\t"
+ "neg.s %[f9], %[f9] \n\t"
+ "add.s %[f16], %[f0], %[f2] \n\t"
+ "add.s %[f17], %[f1], %[f3] \n\t"
+ "sub.s %[f20], %[f0], %[f2] \n\t"
+ "sub.s %[f21], %[f3], %[f1] \n\t"
+ "sub.s %[f18], %[f4], %[f7] \n\t"
+ "sub.s %[f19], %[f5], %[f6] \n\t"
+ "add.s %[f22], %[f4], %[f7] \n\t"
+ "add.s %[f23], %[f5], %[f6] \n\t"
+ "neg.s %[f17], %[f17] \n\t"
+ "swc1 %[f8], 0(%[tmp_a]) \n\t"
+ "swc1 %[f10], 128(%[tmp_a]) \n\t"
+ "swc1 %[f11], 132(%[tmp_a]) \n\t"
+ "swc1 %[f12], 256(%[tmp_a]) \n\t"
+ "swc1 %[f13], 260(%[tmp_a]) \n\t"
+ "swc1 %[f14], 384(%[tmp_a]) \n\t"
+ "swc1 %[f15], 388(%[tmp_a]) \n\t"
+ "swc1 %[f9], 4(%[tmp_a]) \n\t"
+ "swc1 %[f16], 8(%[tmp_a]) \n\t"
+ "swc1 %[f18], 136(%[tmp_a]) \n\t"
+ "swc1 %[f19], 140(%[tmp_a]) \n\t"
+ "swc1 %[f20], 264(%[tmp_a]) \n\t"
+ "swc1 %[f21], 268(%[tmp_a]) \n\t"
+ "swc1 %[f22], 392(%[tmp_a]) \n\t"
+ "swc1 %[f23], 396(%[tmp_a]) \n\t"
+ "swc1 %[f17], 12(%[tmp_a]) \n\t"
+ "bgtz %[count], 1b \n\t"
+ " addiu %[tmp_a], %[tmp_a], 16 \n\t"
+ ".set pop \n\t"
+ : [f0] "=&f" (f0), [f1] "=&f" (f1), [f2] "=&f" (f2), [f3] "=&f" (f3),
+ [f4] "=&f" (f4), [f5] "=&f" (f5), [f6] "=&f" (f6), [f7] "=&f" (f7),
+ [f8] "=&f" (f8), [f9] "=&f" (f9), [f10] "=&f" (f10), [f11] "=&f" (f11),
+ [f12] "=&f" (f12), [f13] "=&f" (f13), [f14] "=&f" (f14),
+ [f15] "=&f" (f15), [f16] "=&f" (f16), [f17] "=&f" (f17),
+ [f18] "=&f" (f18), [f19] "=&f" (f19), [f20] "=&f" (f20),
+ [f21] "=&f" (f21), [f22] "=&f" (f22), [f23] "=&f" (f23),
+ [tmp_a] "=&r" (tmp_a), [count] "=&r" (count)
+ : [a] "r" (a)
+ : "memory"
+ );
+}
+
+static void rftfsub_128_mips(float *a) {
+ const float *c = rdft_w + 32;
+ float wkr, wki, xr, xi, yr, yi;
+ const float temp = 0.5f;
+ float aj20=0, aj21=0, ak20=0, ak21=0, ck1=0;
+ float *a1 = a;
+ float *a2 = a;
+ float *c1 = rdft_w + 33;
+ float *c2 = c1 + 30;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "lwc1 %[aj20], 8(%[a2]) \n\t"
+ "lwc1 %[ak20], 504(%[a1]) \n\t"
+ "lwc1 %[ck1], 0(%[c2]) \n\t"
+ "lwc1 %[aj21], 12(%[a2]) \n\t"
+ "lwc1 %[ak21], 508(%[a1]) \n\t"
+ "sub.s %[wkr], %[temp], %[ck1] \n\t"
+ "sub.s %[xr], %[aj20], %[ak20] \n\t"
+ "add.s %[xi], %[aj21], %[ak21] \n\t"
+ "lwc1 %[wki], 0(%[c1]) \n\t"
+ "addiu %[c2], %[c2],-4 \n\t"
+ "mul.s %[yr], %[wkr], %[xr] \n\t"
+ "mul.s %[yi], %[wkr], %[xi] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[xi], %[wki], %[xi] \n\t"
+ "mul.s %[xr], %[wki], %[xr] \n\t"
+ "sub.s %[yr], %[yr], %[xi] \n\t"
+ "add.s %[yi], %[yi], %[xr] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmsub.s %[yr], %[yr], %[wki], %[xi] \n\t"
+ "madd.s %[yi], %[yi], %[wki], %[xr] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "addiu %[c1], %[c1], 4 \n\t"
+ "sub.s %[aj20], %[aj20], %[yr] \n\t"
+ "sub.s %[aj21], %[aj21], %[yi] \n\t"
+ "add.s %[ak20], %[ak20], %[yr] \n\t"
+ "sub.s %[ak21], %[ak21], %[yi] \n\t"
+ "addiu %[a2], %[a2], 8 \n\t"
+ "swc1 %[aj20], 0(%[a2]) \n\t"
+ "swc1 %[aj21], 4(%[a2]) \n\t"
+ "swc1 %[ak20], 504(%[a1]) \n\t"
+ "swc1 %[ak21], 508(%[a1]) \n\t"
+ "addiu %[a1], %[a1], -8 \n\t"
+ //15x2 passes:
+ "1: \n\t"
+ "lwc1 %[ck1], 0(%[c2]) \n\t"
+ "lwc1 %[aj20], 8(%[a2]) \n\t"
+ "lwc1 %[aj21], 12(%[a2]) \n\t"
+ "lwc1 %[ak20], 504(%[a1]) \n\t"
+ "lwc1 %[ak21], 508(%[a1]) \n\t"
+ "lwc1 $f0, -4(%[c2]) \n\t"
+ "lwc1 $f2, 16(%[a2]) \n\t"
+ "lwc1 $f3, 20(%[a2]) \n\t"
+ "lwc1 $f8, 496(%[a1]) \n\t"
+ "lwc1 $f7, 500(%[a1]) \n\t"
+ "sub.s %[wkr], %[temp], %[ck1] \n\t"
+ "sub.s %[xr], %[aj20], %[ak20] \n\t"
+ "add.s %[xi], %[aj21], %[ak21] \n\t"
+ "lwc1 %[wki], 0(%[c1]) \n\t"
+ "sub.s $f0, %[temp], $f0 \n\t"
+ "sub.s $f6, $f2, $f8 \n\t"
+ "add.s $f4, $f3, $f7 \n\t"
+ "lwc1 $f5, 4(%[c1]) \n\t"
+ "mul.s %[yr], %[wkr], %[xr] \n\t"
+ "mul.s %[yi], %[wkr], %[xi] \n\t"
+ "mul.s $f1, $f0, $f6 \n\t"
+ "mul.s $f0, $f0, $f4 \n\t"
+ "addiu %[c2], %[c2], -8 \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[xi], %[wki], %[xi] \n\t"
+ "mul.s %[xr], %[wki], %[xr] \n\t"
+ "mul.s $f4, $f5, $f4 \n\t"
+ "mul.s $f6, $f5, $f6 \n\t"
+ "sub.s %[yr], %[yr], %[xi] \n\t"
+ "add.s %[yi], %[yi], %[xr] \n\t"
+ "sub.s $f1, $f1, $f4 \n\t"
+ "add.s $f0, $f0, $f6 \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "nmsub.s %[yr], %[yr], %[wki], %[xi] \n\t"
+ "madd.s %[yi], %[yi], %[wki], %[xr] \n\t"
+ "nmsub.s $f1, $f1, $f5, $f4 \n\t"
+ "madd.s $f0, $f0, $f5, $f6 \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "addiu %[c1], %[c1], 8 \n\t"
+ "sub.s %[aj20], %[aj20], %[yr] \n\t"
+ "sub.s %[aj21], %[aj21], %[yi] \n\t"
+ "add.s %[ak20], %[ak20], %[yr] \n\t"
+ "sub.s %[ak21], %[ak21], %[yi] \n\t"
+ "sub.s $f2, $f2, $f1 \n\t"
+ "sub.s $f3, $f3, $f0 \n\t"
+ "add.s $f1, $f8, $f1 \n\t"
+ "sub.s $f0, $f7, $f0 \n\t"
+ "swc1 %[aj20], 8(%[a2]) \n\t"
+ "swc1 %[aj21], 12(%[a2]) \n\t"
+ "swc1 %[ak20], 504(%[a1]) \n\t"
+ "swc1 %[ak21], 508(%[a1]) \n\t"
+ "swc1 $f2, 16(%[a2]) \n\t"
+ "swc1 $f3, 20(%[a2]) \n\t"
+ "swc1 $f1, 496(%[a1]) \n\t"
+ "swc1 $f0, 500(%[a1]) \n\t"
+ "addiu %[a2], %[a2], 16 \n\t"
+ "bne %[c2], %[c], 1b \n\t"
+ " addiu %[a1], %[a1], -16 \n\t"
+ ".set pop \n\t"
+ : [a] "+r" (a), [c] "+r" (c), [a1] "+r" (a1), [a2] "+r" (a2),
+ [c1] "+r" (c1), [c2] "+r" (c2), [wkr] "=&f" (wkr), [wki] "=&f" (wki),
+ [xr] "=&f" (xr), [xi] "=&f" (xi), [yr] "=&f" (yr), [yi] "=&f" (yi),
+ [aj20] "=&f" (aj20), [aj21] "=&f" (aj21), [ak20] "=&f" (ak20),
+ [ak21] "=&f" (ak21), [ck1] "=&f" (ck1)
+ : [temp] "f" (temp)
+ : "memory", "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8"
+ );
+}
+
+static void rftbsub_128_mips(float *a) {
+ const float *c = rdft_w + 32;
+ float wkr, wki, xr, xi, yr, yi;
+ a[1] = -a[1];
+ a[65] = -a[65];
+ const float temp = 0.5f;
+ float aj20=0, aj21=0, ak20=0, ak21=0, ck1=0;
+ float *a1 = a;
+ float *a2 = a;
+ float *c1 = rdft_w + 33;
+ float *c2 = c1 + 30;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "lwc1 %[aj20], 8(%[a2]) \n\t"
+ "lwc1 %[ak20], 504(%[a1]) \n\t"
+ "lwc1 %[ck1], 0(%[c2]) \n\t"
+ "lwc1 %[aj21], 12(%[a2]) \n\t"
+ "lwc1 %[ak21], 508(%[a1]) \n\t"
+ "sub.s %[wkr], %[temp], %[ck1] \n\t"
+ "sub.s %[xr], %[aj20], %[ak20] \n\t"
+ "add.s %[xi], %[aj21], %[ak21] \n\t"
+ "lwc1 %[wki], 0(%[c1]) \n\t"
+ "addiu %[c2], %[c2], -4 \n\t"
+ "mul.s %[yr], %[wkr], %[xr] \n\t"
+ "mul.s %[yi], %[wkr], %[xi] \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[xi], %[wki], %[xi] \n\t"
+ "mul.s %[xr], %[wki], %[xr] \n\t"
+ "add.s %[yr], %[yr], %[xi] \n\t"
+ "sub.s %[yi], %[yi], %[xr] \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[yr], %[yr], %[wki], %[xi] \n\t"
+ "nmsub.s %[yi], %[yi], %[wki], %[xr] \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "addiu %[c1], %[c1],4 \n\t"
+ "sub.s %[aj20], %[aj20], %[yr] \n\t"
+ "sub.s %[aj21], %[yi], %[aj21] \n\t"
+ "add.s %[ak20], %[ak20], %[yr] \n\t"
+ "sub.s %[ak21], %[yi], %[ak21] \n\t"
+ "addiu %[a2], %[a2], 8 \n\t"
+ "swc1 %[aj20], 0(%[a2]) \n\t"
+ "swc1 %[aj21], 4(%[a2]) \n\t"
+ "swc1 %[ak20], 504(%[a1]) \n\t"
+ "swc1 %[ak21], 508(%[a1]) \n\t"
+ "addiu %[a1], %[a1], -8 \n\t"
+ //15x2 passes:
+ "1: \n\t"
+ "lwc1 %[ck1], 0(%[c2]) \n\t"
+ "lwc1 %[aj20], 8(%[a2]) \n\t"
+ "lwc1 %[aj21], 12(%[a2]) \n\t"
+ "lwc1 %[ak20], 504(%[a1]) \n\t"
+ "lwc1 %[ak21], 508(%[a1]) \n\t"
+ "lwc1 $f0, -4(%[c2]) \n\t"
+ "lwc1 $f2, 16(%[a2]) \n\t"
+ "lwc1 $f3, 20(%[a2]) \n\t"
+ "lwc1 $f8, 496(%[a1]) \n\t"
+ "lwc1 $f7, 500(%[a1]) \n\t"
+ "sub.s %[wkr], %[temp], %[ck1] \n\t"
+ "sub.s %[xr], %[aj20], %[ak20] \n\t"
+ "add.s %[xi], %[aj21], %[ak21] \n\t"
+ "lwc1 %[wki], 0(%[c1]) \n\t"
+ "sub.s $f0, %[temp], $f0 \n\t"
+ "sub.s $f6, $f2, $f8 \n\t"
+ "add.s $f4, $f3, $f7 \n\t"
+ "lwc1 $f5, 4(%[c1]) \n\t"
+ "mul.s %[yr], %[wkr], %[xr] \n\t"
+ "mul.s %[yi], %[wkr], %[xi] \n\t"
+ "mul.s $f1, $f0, $f6 \n\t"
+ "mul.s $f0, $f0, $f4 \n\t"
+ "addiu %[c2], %[c2], -8 \n\t"
+#if !defined(MIPS32_R2_LE)
+ "mul.s %[xi], %[wki], %[xi] \n\t"
+ "mul.s %[xr], %[wki], %[xr] \n\t"
+ "mul.s $f4, $f5, $f4 \n\t"
+ "mul.s $f6, $f5, $f6 \n\t"
+ "add.s %[yr], %[yr], %[xi] \n\t"
+ "sub.s %[yi], %[yi], %[xr] \n\t"
+ "add.s $f1, $f1, $f4 \n\t"
+ "sub.s $f0, $f0, $f6 \n\t"
+#else // #if !defined(MIPS32_R2_LE)
+ "madd.s %[yr], %[yr], %[wki], %[xi] \n\t"
+ "nmsub.s %[yi], %[yi], %[wki], %[xr] \n\t"
+ "madd.s $f1, $f1, $f5, $f4 \n\t"
+ "nmsub.s $f0, $f0, $f5, $f6 \n\t"
+#endif // #if !defined(MIPS32_R2_LE)
+ "addiu %[c1], %[c1], 8 \n\t"
+ "sub.s %[aj20], %[aj20], %[yr] \n\t"
+ "sub.s %[aj21], %[yi], %[aj21] \n\t"
+ "add.s %[ak20], %[ak20], %[yr] \n\t"
+ "sub.s %[ak21], %[yi], %[ak21] \n\t"
+ "sub.s $f2, $f2, $f1 \n\t"
+ "sub.s $f3, $f0, $f3 \n\t"
+ "add.s $f1, $f8, $f1 \n\t"
+ "sub.s $f0, $f0, $f7 \n\t"
+ "swc1 %[aj20], 8(%[a2]) \n\t"
+ "swc1 %[aj21], 12(%[a2]) \n\t"
+ "swc1 %[ak20], 504(%[a1]) \n\t"
+ "swc1 %[ak21], 508(%[a1]) \n\t"
+ "swc1 $f2, 16(%[a2]) \n\t"
+ "swc1 $f3, 20(%[a2]) \n\t"
+ "swc1 $f1, 496(%[a1]) \n\t"
+ "swc1 $f0, 500(%[a1]) \n\t"
+ "addiu %[a2], %[a2], 16 \n\t"
+ "bne %[c2], %[c], 1b \n\t"
+ " addiu %[a1], %[a1], -16 \n\t"
+ ".set pop \n\t"
+ : [a] "+r" (a), [c] "+r" (c), [a1] "+r" (a1), [a2] "+r" (a2),
+ [c1] "+r" (c1), [c2] "+r" (c2), [wkr] "=&f" (wkr), [wki] "=&f" (wki),
+ [xr] "=&f" (xr), [xi] "=&f" (xi), [yr] "=&f" (yr), [yi] "=&f" (yi),
+ [aj20] "=&f" (aj20), [aj21] "=&f" (aj21), [ak20] "=&f" (ak20),
+ [ak21] "=&f" (ak21), [ck1] "=&f" (ck1)
+ : [temp] "f" (temp)
+ : "memory", "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8"
+ );
+}
+
+void aec_rdft_init_mips(void) {
+ cft1st_128 = cft1st_128_mips;
+ cftmdl_128 = cftmdl_128_mips;
+ rftfsub_128 = rftfsub_128_mips;
+ rftbsub_128 = rftbsub_128_mips;
+ cftfsub_128 = cftfsub_128_mips;
+ cftbsub_128 = cftbsub_128_mips;
+ bitrv2_128 = bitrv2_128_mips;
+}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.c b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.c
index bbdd5f628b2..ba3b9243e19 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/echo_cancellation.c
@@ -104,18 +104,18 @@ int webrtc_aec_instance_count = 0;
static void EstBufDelayNormal(aecpc_t* aecInst);
static void EstBufDelayExtended(aecpc_t* aecInst);
static int ProcessNormal(aecpc_t* self,
- const int16_t* near,
- const int16_t* near_high,
- int16_t* out,
- int16_t* out_high,
+ const float* near,
+ const float* near_high,
+ float* out,
+ float* out_high,
int16_t num_samples,
int16_t reported_delay_ms,
int32_t skew);
static void ProcessExtended(aecpc_t* self,
- const int16_t* near,
- const int16_t* near_high,
- int16_t* out,
- int16_t* out_high,
+ const float* near,
+ const float* near_high,
+ float* out,
+ float* out_high,
int16_t num_samples,
int16_t reported_delay_ms,
int32_t skew);
@@ -254,7 +254,7 @@ int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) {
aecpc->checkBuffSize = 1;
aecpc->firstVal = 0;
- aecpc->startup_phase = 1;
+ aecpc->startup_phase = WebRtcAec_reported_delay_enabled(aecpc->aec);
aecpc->bufSizeStart = 0;
aecpc->checkBufSizeCtr = 0;
aecpc->msInSndCardBuf = 0;
@@ -372,10 +372,10 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
}
int32_t WebRtcAec_Process(void* aecInst,
- const int16_t* nearend,
- const int16_t* nearendH,
- int16_t* out,
- int16_t* outH,
+ const float* nearend,
+ const float* nearendH,
+ float* out,
+ float* outH,
int16_t nrOfSamples,
int16_t msInSndCardBuf,
int32_t skew) {
@@ -632,10 +632,10 @@ AecCore* WebRtcAec_aec_core(void* handle) {
}
static int ProcessNormal(aecpc_t* aecpc,
- const int16_t* nearend,
- const int16_t* nearendH,
- int16_t* out,
- int16_t* outH,
+ const float* nearend,
+ const float* nearendH,
+ float* out,
+ float* outH,
int16_t nrOfSamples,
int16_t msInSndCardBuf,
int32_t skew) {
@@ -689,10 +689,10 @@ static int ProcessNormal(aecpc_t* aecpc,
if (aecpc->startup_phase) {
// Only needed if they don't already point to the same place.
if (nearend != out) {
- memcpy(out, nearend, sizeof(short) * nrOfSamples);
+ memcpy(out, nearend, sizeof(*out) * nrOfSamples);
}
if (nearendH != outH) {
- memcpy(outH, nearendH, sizeof(short) * nrOfSamples);
+ memcpy(outH, nearendH, sizeof(*outH) * nrOfSamples);
}
// The AEC is in the start up mode
@@ -766,7 +766,9 @@ static int ProcessNormal(aecpc_t* aecpc,
}
} else {
// AEC is enabled.
- EstBufDelayNormal(aecpc);
+ if (WebRtcAec_reported_delay_enabled(aecpc->aec)) {
+ EstBufDelayNormal(aecpc);
+ }
// Note that 1 frame is supported for NB and 2 frames for WB.
for (i = 0; i < nFrames; i++) {
@@ -787,10 +789,10 @@ static int ProcessNormal(aecpc_t* aecpc,
}
static void ProcessExtended(aecpc_t* self,
- const int16_t* near,
- const int16_t* near_high,
- int16_t* out,
- int16_t* out_high,
+ const float* near,
+ const float* near_high,
+ float* out,
+ float* out_high,
int16_t num_samples,
int16_t reported_delay_ms,
int32_t skew) {
@@ -821,10 +823,10 @@ static void ProcessExtended(aecpc_t* self,
if (!self->farend_started) {
// Only needed if they don't already point to the same place.
if (near != out) {
- memcpy(out, near, sizeof(short) * num_samples);
+ memcpy(out, near, sizeof(*out) * num_samples);
}
if (near_high != out_high) {
- memcpy(out_high, near_high, sizeof(short) * num_samples);
+ memcpy(out_high, near_high, sizeof(*out_high) * num_samples);
}
return;
}
@@ -842,7 +844,9 @@ static void ProcessExtended(aecpc_t* self,
self->startup_phase = 0;
}
- EstBufDelayExtended(self);
+ if (WebRtcAec_reported_delay_enabled(self->aec)) {
+ EstBufDelayExtended(self);
+ }
{
// |delay_diff_offset| gives us the option to manually rewind the delay on
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/include/echo_cancellation.h b/chromium/third_party/webrtc/modules/audio_processing/aec/include/echo_cancellation.h
index 4c852cf64bf..dc64a345c3e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/include/echo_cancellation.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/include/echo_cancellation.h
@@ -68,7 +68,7 @@ extern "C" {
*
* Inputs Description
* -------------------------------------------------------------------
- * void **aecInst Pointer to the AEC instance to be created
+ * void** aecInst Pointer to the AEC instance to be created
* and initialized
*
* Outputs Description
@@ -83,7 +83,7 @@ int32_t WebRtcAec_Create(void** aecInst);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecInst Pointer to the AEC instance
+ * void* aecInst Pointer to the AEC instance
*
* Outputs Description
* -------------------------------------------------------------------
@@ -97,7 +97,7 @@ int32_t WebRtcAec_Free(void* aecInst);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecInst Pointer to the AEC instance
+ * void* aecInst Pointer to the AEC instance
* int32_t sampFreq Sampling frequency of data
* int32_t scSampFreq Soundcard sampling frequency
*
@@ -113,8 +113,8 @@ int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecInst Pointer to the AEC instance
- * int16_t *farend In buffer containing one frame of
+ * void* aecInst Pointer to the AEC instance
+ * int16_t* farend In buffer containing one frame of
* farend signal for L band
* int16_t nrOfSamples Number of samples in farend buffer
*
@@ -132,10 +132,10 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecInst Pointer to the AEC instance
- * int16_t *nearend In buffer containing one frame of
+ * void* aecInst Pointer to the AEC instance
+ * float* nearend In buffer containing one frame of
* nearend+echo signal for L band
- * int16_t *nearendH In buffer containing one frame of
+ * float* nearendH In buffer containing one frame of
* nearend+echo signal for H band
* int16_t nrOfSamples Number of samples in nearend buffer
* int16_t msInSndCardBuf Delay estimate for sound card and
@@ -146,18 +146,18 @@ int32_t WebRtcAec_BufferFarend(void* aecInst,
*
* Outputs Description
* -------------------------------------------------------------------
- * int16_t *out Out buffer, one frame of processed nearend
+ * float* out Out buffer, one frame of processed nearend
* for L band
- * int16_t *outH Out buffer, one frame of processed nearend
+ * float* outH Out buffer, one frame of processed nearend
* for H band
* int32_t return 0: OK
* -1: error
*/
int32_t WebRtcAec_Process(void* aecInst,
- const int16_t* nearend,
- const int16_t* nearendH,
- int16_t* out,
- int16_t* outH,
+ const float* nearend,
+ const float* nearendH,
+ float* out,
+ float* outH,
int16_t nrOfSamples,
int16_t msInSndCardBuf,
int32_t skew);
@@ -167,7 +167,7 @@ int32_t WebRtcAec_Process(void* aecInst,
*
* Inputs Description
* -------------------------------------------------------------------
- * void *handle Pointer to the AEC instance
+ * void* handle Pointer to the AEC instance
* AecConfig config Config instance that contains all
* properties to be set
*
@@ -183,11 +183,11 @@ int WebRtcAec_set_config(void* handle, AecConfig config);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *handle Pointer to the AEC instance
+ * void* handle Pointer to the AEC instance
*
* Outputs Description
* -------------------------------------------------------------------
- * int *status 0: Almost certainly nearend single-talk
+ * int* status 0: Almost certainly nearend single-talk
* 1: Might not be neared single-talk
* int return 0: OK
* -1: error
@@ -199,11 +199,11 @@ int WebRtcAec_get_echo_status(void* handle, int* status);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *handle Pointer to the AEC instance
+ * void* handle Pointer to the AEC instance
*
* Outputs Description
* -------------------------------------------------------------------
- * AecMetrics *metrics Struct which will be filled out with the
+ * AecMetrics* metrics Struct which will be filled out with the
* current echo metrics.
* int return 0: OK
* -1: error
@@ -232,7 +232,7 @@ int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecInst Pointer to the AEC instance
+ * void* aecInst Pointer to the AEC instance
*
* Outputs Description
* -------------------------------------------------------------------
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
index a19030ae350..a13d47622c9 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/aec/system_delay_unittest.cc
@@ -9,12 +9,12 @@
*/
#include "testing/gtest/include/gtest/gtest.h"
-
extern "C" {
#include "webrtc/modules/audio_processing/aec/aec_core.h"
}
#include "webrtc/modules/audio_processing/aec/echo_cancellation_internal.h"
#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
#include "webrtc/typedefs.h"
namespace {
@@ -46,16 +46,18 @@ class SystemDelayTest : public ::testing::Test {
aecpc_t* self_;
int samples_per_frame_;
// Dummy input/output speech data.
- int16_t far_[160];
- int16_t near_[160];
- int16_t out_[160];
+ static const int kSamplesPerChunk = 160;
+ int16_t far_[kSamplesPerChunk];
+ float near_[kSamplesPerChunk];
+ float out_[kSamplesPerChunk];
};
SystemDelayTest::SystemDelayTest()
: handle_(NULL), self_(NULL), samples_per_frame_(0) {
// Dummy input data are set with more or less arbitrary non-zero values.
memset(far_, 1, sizeof(far_));
- memset(near_, 2, sizeof(near_));
+ for (int i = 0; i < kSamplesPerChunk; i++)
+ near_[i] = 514.0;
memset(out_, 0, sizeof(out_));
}
@@ -251,6 +253,9 @@ TEST_F(SystemDelayTest, CorrectDelayAfterStableBufferBuildUp) {
// conditions, but with an empty internal far-end buffer. Once that is done we
// verify that the system delay is increased correctly until we have reach an
// internal buffer size of 75% of what's been reported.
+
+ // This test assumes the reported delays are used.
+ WebRtcAec_enable_reported_delay(WebRtcAec_aec_core(handle_), 1);
for (size_t i = 0; i < kNumSampleRates; i++) {
Init(kSampleRateHz[i]);
@@ -332,6 +337,9 @@ TEST_F(SystemDelayTest, CorrectDelayDuringDrift) {
// device buffer. The drift is simulated by decreasing the reported device
// buffer size by 1 ms every 100 ms. If the device buffer size goes below 30
// ms we jump (add) 10 ms to give a repeated pattern.
+
+ // This test assumes the reported delays are used.
+ WebRtcAec_enable_reported_delay(WebRtcAec_aec_core(handle_), 1);
for (size_t i = 0; i < kNumSampleRates; i++) {
Init(kSampleRateHz[i]);
RunStableStartup();
@@ -365,6 +373,9 @@ TEST_F(SystemDelayTest, ShouldRecoverAfterGlitch) {
// the device.
// The system is said to be in a non-causal state if the difference between
// the device buffer and system delay is less than a block (64 samples).
+
+ // This test assumes the reported delays are used.
+ WebRtcAec_enable_reported_delay(WebRtcAec_aec_core(handle_), 1);
for (size_t i = 0; i < kNumSampleRates; i++) {
Init(kSampleRateHz[i]);
RunStableStartup();
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c
index fc94f1b889e..0f34874612d 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core.c
@@ -266,6 +266,9 @@ int WebRtcAecm_CreateCore(AecmCore_t **aecmInst)
aecm = NULL;
return -1;
}
+ // TODO(bjornv): Explicitly disable robust delay validation until no
+ // performance regression has been established. Then remove the line.
+ WebRtc_enable_robust_validation(aecm->delay_estimator, 0);
aecm->real_fft = WebRtcSpl_CreateRealFFT(PART_LEN_SHIFT);
if (aecm->real_fft == NULL) {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c
index 63d4ac90280..f8491e97378 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/aecm_core_c.c
@@ -260,7 +260,7 @@ static int TimeToFrequencyDomain(AecmCore_t* aecm,
__asm __volatile(
"smulbb %[tmp32no1], %[real], %[real]\n\t"
"smlabb %[tmp32no2], %[imag], %[imag], %[tmp32no1]\n\t"
- :[tmp32no1]"+r"(tmp32no1),
+ :[tmp32no1]"+&r"(tmp32no1),
[tmp32no2]"=r"(tmp32no2)
:[real]"r"(freq_signal[i].real),
[imag]"r"(freq_signal[i].imag)
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c b/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c
index b896de0a214..088bbf03f74 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/echo_control_mobile.c
@@ -443,27 +443,14 @@ int32_t WebRtcAecm_Process(void *aecmInst, const int16_t *nearendNoisy,
// Call the AECM
/*WebRtcAecm_ProcessFrame(aecm->aecmCore, farend, &nearend[FRAME_LEN * i],
&out[FRAME_LEN * i], aecm->knownDelay);*/
- if (nearendClean == NULL)
- {
- if (WebRtcAecm_ProcessFrame(aecm->aecmCore,
- farend_ptr,
- &nearendNoisy[FRAME_LEN * i],
- NULL,
- &out[FRAME_LEN * i]) == -1)
- {
- return -1;
- }
- } else
- {
- if (WebRtcAecm_ProcessFrame(aecm->aecmCore,
- farend_ptr,
- &nearendNoisy[FRAME_LEN * i],
- &nearendClean[FRAME_LEN * i],
- &out[FRAME_LEN * i]) == -1)
- {
- return -1;
- }
- }
+ if (WebRtcAecm_ProcessFrame(aecm->aecmCore,
+ farend_ptr,
+ &nearendNoisy[FRAME_LEN * i],
+ (nearendClean
+ ? &nearendClean[FRAME_LEN * i]
+ : NULL),
+ &out[FRAME_LEN * i]) == -1)
+ return -1;
}
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h b/chromium/third_party/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h
index 8ea2e87e2ff..ac43576dd26 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h
@@ -45,7 +45,7 @@ extern "C" {
*
* Inputs Description
* -------------------------------------------------------------------
- * void **aecmInst Pointer to the AECM instance to be
+ * void** aecmInst Pointer to the AECM instance to be
* created and initialized
*
* Outputs Description
@@ -60,11 +60,11 @@ int32_t WebRtcAecm_Create(void **aecmInst);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
+ * void* aecmInst Pointer to the AECM instance
*
* Outputs Description
* -------------------------------------------------------------------
- * int32_t return 0: OK
+ * int32_t return 0: OK
* -1: error
*/
int32_t WebRtcAecm_Free(void *aecmInst);
@@ -74,7 +74,7 @@ int32_t WebRtcAecm_Free(void *aecmInst);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
+ * void* aecmInst Pointer to the AECM instance
* int32_t sampFreq Sampling frequency of data
*
* Outputs Description
@@ -89,8 +89,8 @@ int32_t WebRtcAecm_Init(void* aecmInst, int32_t sampFreq);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
- * int16_t *farend In buffer containing one frame of
+ * void* aecmInst Pointer to the AECM instance
+ * int16_t* farend In buffer containing one frame of
* farend signal
* int16_t nrOfSamples Number of samples in farend buffer
*
@@ -106,14 +106,14 @@ int32_t WebRtcAecm_BufferFarend(void* aecmInst,
/*
* Runs the AECM on an 80 or 160 sample blocks of data.
*
- * Inputs Description
+ * Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
- * int16_t *nearendNoisy In buffer containing one frame of
+ * void* aecmInst Pointer to the AECM instance
+ * int16_t* nearendNoisy In buffer containing one frame of
* reference nearend+echo signal. If
* noise reduction is active, provide
* the noisy signal here.
- * int16_t *nearendClean In buffer containing one frame of
+ * int16_t* nearendClean In buffer containing one frame of
* nearend+echo signal. If noise
* reduction is active, provide the
* clean signal here. Otherwise pass a
@@ -122,11 +122,11 @@ int32_t WebRtcAecm_BufferFarend(void* aecmInst,
* int16_t msInSndCardBuf Delay estimate for sound card and
* system buffers
*
- * Outputs Description
+ * Outputs Description
* -------------------------------------------------------------------
- * int16_t *out Out buffer, one frame of processed nearend
- * int32_t return 0: OK
- * -1: error
+ * int16_t* out Out buffer, one frame of processed nearend
+ * int32_t return 0: OK
+ * -1: error
*/
int32_t WebRtcAecm_Process(void* aecmInst,
const int16_t* nearendNoisy,
@@ -140,8 +140,8 @@ int32_t WebRtcAecm_Process(void* aecmInst,
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
- * AecmConfig config Config instance that contains all
+ * void* aecmInst Pointer to the AECM instance
+ * AecmConfig config Config instance that contains all
* properties to be set
*
* Outputs Description
@@ -156,11 +156,11 @@ int32_t WebRtcAecm_set_config(void* aecmInst, AecmConfig config);
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
+ * void* aecmInst Pointer to the AECM instance
*
* Outputs Description
* -------------------------------------------------------------------
- * AecmConfig *config Pointer to the config instance that
+ * AecmConfig* config Pointer to the config instance that
* all properties will be written to
* int32_t return 0: OK
* -1: error
@@ -178,7 +178,7 @@ int32_t WebRtcAecm_get_config(void *aecmInst, AecmConfig *config);
*
* Outputs Description
* -------------------------------------------------------------------
- * int32_t return 0: OK
+ * int32_t return 0: OK
* -1: error
*/
int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
@@ -197,7 +197,7 @@ int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
*
* Outputs Description
* -------------------------------------------------------------------
- * int32_t return 0: OK
+ * int32_t return 0: OK
* -1: error
*/
int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
@@ -209,7 +209,7 @@ int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
*
* Outputs Description
* -------------------------------------------------------------------
- * size_t return : size in bytes
+ * size_t return Size in bytes
*/
size_t WebRtcAecm_echo_path_size_bytes();
@@ -218,7 +218,7 @@ size_t WebRtcAecm_echo_path_size_bytes();
*
* Inputs Description
* -------------------------------------------------------------------
- * void *aecmInst Pointer to the AECM instance
+ * void* aecmInst Pointer to the AECM instance
*
* Outputs Description
* -------------------------------------------------------------------
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.c b/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.c
index 1e8e3d86b2d..4f110cc2092 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.c
@@ -822,10 +822,16 @@ int32_t WebRtcAgc_ProcessAnalog(void *state, int32_t inMicLevel,
if (inMicLevelTmp != stt->micVol)
{
- // Incoming level mismatch; update our level.
- // This could be the case if the volume is changed manually, or if the
- // sound device has a low volume resolution.
- stt->micVol = inMicLevelTmp;
+ if (inMicLevel == stt->lastInMicLevel) {
+ // We requested a volume adjustment, but it didn't occur. This is
+ // probably due to a coarse quantization of the volume slider.
+ // Restore the requested value to prevent getting stuck.
+ inMicLevelTmp = stt->micVol;
+ }
+ else {
+ // As long as the value changed, update to match.
+ stt->micVol = inMicLevelTmp;
+ }
}
if (inMicLevelTmp > stt->maxLevel)
@@ -835,6 +841,7 @@ int32_t WebRtcAgc_ProcessAnalog(void *state, int32_t inMicLevel,
}
// Store last value here, after we've taken care of manual updates etc.
+ stt->lastInMicLevel = inMicLevel;
lastMicVol = stt->micVol;
/* Checks if the signal is saturated. Also a check if individual samples
@@ -1597,6 +1604,7 @@ int WebRtcAgc_Init(void *agcInst, int32_t minLevel, int32_t maxLevel,
stt->maxInit = stt->maxLevel;
stt->zeroCtrlMax = stt->maxAnalog;
+ stt->lastInMicLevel = 0;
/* Initialize micVol parameter */
stt->micVol = stt->maxAnalog;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.h b/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.h
index 16ea29c4961..b036f449c70 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc/analog_agc.h
@@ -111,6 +111,7 @@ typedef struct
int32_t minLevel; // Minimum possible volume level
int32_t minOutput; // Minimum output volume level
int32_t zeroCtrlMax; // Remember max gain => don't amp low input
+ int32_t lastInMicLevel;
int16_t scale; // Scale factor for internal volume levels
#ifdef MIC_LEVEL_FEEDBACK
diff --git a/chromium/third_party/webrtc/modules/audio_processing/agc/digital_agc.c b/chromium/third_party/webrtc/modules/audio_processing/agc/digital_agc.c
index 00565dd7230..4b169c180eb 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/agc/digital_agc.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/agc/digital_agc.c
@@ -118,7 +118,7 @@ int32_t WebRtcAgc_CalculateGainTable(int32_t *gainTable, // Q16
limiterLvlX = analogTarget - limiterOffset;
limiterIdx = 2
+ WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_LSHIFT_W32((int32_t)limiterLvlX, 13),
- WEBRTC_SPL_RSHIFT_U16(kLog10_2, 1));
+ (kLog10_2 / 2));
tmp16no1 = WebRtcSpl_DivW32W16ResW16(limiterOffset + (kCompRatio >> 1), kCompRatio);
limiterLvl = targetLevelDbfs + tmp16no1;
@@ -288,12 +288,7 @@ int32_t WebRtcAgc_InitDigital(DigitalAgc_t *stt, int16_t agcMode)
int32_t WebRtcAgc_AddFarendToDigital(DigitalAgc_t *stt, const int16_t *in_far,
int16_t nrSamples)
{
- // Check for valid pointer
- if (&stt->vadFarend == NULL)
- {
- return -1;
- }
-
+ assert(stt != NULL);
// VAD for far end
WebRtcAgc_ProcessVad(&stt->vadFarend, in_far, nrSamples);
@@ -778,7 +773,7 @@ int16_t WebRtcAgc_ProcessVad(AgcVad_t *state, // (i) VAD state
tmp16 = WEBRTC_SPL_LSHIFT_W16(3, 12);
tmp32 = WEBRTC_SPL_MUL_16_16(tmp16, (dB - state->meanLongTerm));
tmp32 = WebRtcSpl_DivW32W16(tmp32, state->stdLongTerm);
- tmpU16 = WEBRTC_SPL_LSHIFT_U16((uint16_t)13, 12);
+ tmpU16 = (13 << 12);
tmp32b = WEBRTC_SPL_MUL_16_U16(state->logRatio, tmpU16);
tmp32 += WEBRTC_SPL_RSHIFT_W32(tmp32b, 10);
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.cc
index 048d048723e..b0f1eb6c1db 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/audio_processing/audio_buffer.h"
+#include "webrtc/common_audio/include/audio_util.h"
+#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
namespace webrtc {
@@ -21,145 +23,343 @@ enum {
kSamplesPer32kHzChannel = 320
};
-void StereoToMono(const int16_t* left, const int16_t* right,
- int16_t* out, int samples_per_channel) {
- assert(left != NULL && right != NULL && out != NULL);
- for (int i = 0; i < samples_per_channel; i++) {
- int32_t data32 = (static_cast<int32_t>(left[i]) +
- static_cast<int32_t>(right[i])) >> 1;
+bool HasKeyboardChannel(AudioProcessing::ChannelLayout layout) {
+ switch (layout) {
+ case AudioProcessing::kMono:
+ case AudioProcessing::kStereo:
+ return false;
+ case AudioProcessing::kMonoAndKeyboard:
+ case AudioProcessing::kStereoAndKeyboard:
+ return true;
+ }
+ assert(false);
+ return false;
+}
+
+int KeyboardChannelIndex(AudioProcessing::ChannelLayout layout) {
+ switch (layout) {
+ case AudioProcessing::kMono:
+ case AudioProcessing::kStereo:
+ assert(false);
+ return -1;
+ case AudioProcessing::kMonoAndKeyboard:
+ return 1;
+ case AudioProcessing::kStereoAndKeyboard:
+ return 2;
+ }
+ assert(false);
+ return -1;
+}
+
- out[i] = WebRtcSpl_SatW32ToW16(data32);
+void StereoToMono(const float* left, const float* right, float* out,
+ int samples_per_channel) {
+ for (int i = 0; i < samples_per_channel; ++i) {
+ out[i] = (left[i] + right[i]) / 2;
}
}
+
+void StereoToMono(const int16_t* left, const int16_t* right, int16_t* out,
+ int samples_per_channel) {
+ for (int i = 0; i < samples_per_channel; ++i) {
+ out[i] = (left[i] + right[i]) >> 1;
+ }
+}
+
} // namespace
-struct AudioChannel {
- AudioChannel() {
- memset(data, 0, sizeof(data));
+// One int16_t and one float ChannelBuffer that are kept in sync. The sync is
+// broken when someone requests write access to either ChannelBuffer, and
+// reestablished when someone requests the outdated ChannelBuffer. It is
+// therefore safe to use the return value of ibuf() and fbuf() until the next
+// call to the other method.
+class IFChannelBuffer {
+ public:
+ IFChannelBuffer(int samples_per_channel, int num_channels)
+ : ivalid_(true),
+ ibuf_(samples_per_channel, num_channels),
+ fvalid_(true),
+ fbuf_(samples_per_channel, num_channels) {}
+
+ ChannelBuffer<int16_t>* ibuf() {
+ RefreshI();
+ fvalid_ = false;
+ return &ibuf_;
+ }
+
+ ChannelBuffer<float>* fbuf() {
+ RefreshF();
+ ivalid_ = false;
+ return &fbuf_;
}
- int16_t data[kSamplesPer32kHzChannel];
+ private:
+ void RefreshF() {
+ if (!fvalid_) {
+ assert(ivalid_);
+ const int16_t* const int_data = ibuf_.data();
+ float* const float_data = fbuf_.data();
+ const int length = fbuf_.length();
+ for (int i = 0; i < length; ++i)
+ float_data[i] = int_data[i];
+ fvalid_ = true;
+ }
+ }
+
+ void RefreshI() {
+ if (!ivalid_) {
+ assert(fvalid_);
+ const float* const float_data = fbuf_.data();
+ int16_t* const int_data = ibuf_.data();
+ const int length = ibuf_.length();
+ for (int i = 0; i < length; ++i)
+ int_data[i] = WEBRTC_SPL_SAT(std::numeric_limits<int16_t>::max(),
+ float_data[i],
+ std::numeric_limits<int16_t>::min());
+ ivalid_ = true;
+ }
+ }
+
+ bool ivalid_;
+ ChannelBuffer<int16_t> ibuf_;
+ bool fvalid_;
+ ChannelBuffer<float> fbuf_;
};
-struct SplitAudioChannel {
- SplitAudioChannel() {
- memset(low_pass_data, 0, sizeof(low_pass_data));
- memset(high_pass_data, 0, sizeof(high_pass_data));
- memset(analysis_filter_state1, 0, sizeof(analysis_filter_state1));
- memset(analysis_filter_state2, 0, sizeof(analysis_filter_state2));
- memset(synthesis_filter_state1, 0, sizeof(synthesis_filter_state1));
- memset(synthesis_filter_state2, 0, sizeof(synthesis_filter_state2));
+class SplitChannelBuffer {
+ public:
+ SplitChannelBuffer(int samples_per_split_channel, int num_channels)
+ : low_(samples_per_split_channel, num_channels),
+ high_(samples_per_split_channel, num_channels) {
}
+ ~SplitChannelBuffer() {}
- int16_t low_pass_data[kSamplesPer16kHzChannel];
- int16_t high_pass_data[kSamplesPer16kHzChannel];
+ int16_t* low_channel(int i) { return low_.ibuf()->channel(i); }
+ int16_t* high_channel(int i) { return high_.ibuf()->channel(i); }
+ float* low_channel_f(int i) { return low_.fbuf()->channel(i); }
+ float* high_channel_f(int i) { return high_.fbuf()->channel(i); }
- int32_t analysis_filter_state1[6];
- int32_t analysis_filter_state2[6];
- int32_t synthesis_filter_state1[6];
- int32_t synthesis_filter_state2[6];
+ private:
+ IFChannelBuffer low_;
+ IFChannelBuffer high_;
};
-// TODO(andrew): check range of input parameters?
-AudioBuffer::AudioBuffer(int max_num_channels,
- int samples_per_channel)
- : max_num_channels_(max_num_channels),
- num_channels_(0),
+AudioBuffer::AudioBuffer(int input_samples_per_channel,
+ int num_input_channels,
+ int process_samples_per_channel,
+ int num_process_channels,
+ int output_samples_per_channel)
+ : input_samples_per_channel_(input_samples_per_channel),
+ num_input_channels_(num_input_channels),
+ proc_samples_per_channel_(process_samples_per_channel),
+ num_proc_channels_(num_process_channels),
+ output_samples_per_channel_(output_samples_per_channel),
+ samples_per_split_channel_(proc_samples_per_channel_),
num_mixed_channels_(0),
num_mixed_low_pass_channels_(0),
- data_was_mixed_(false),
- samples_per_channel_(samples_per_channel),
- samples_per_split_channel_(samples_per_channel),
reference_copied_(false),
activity_(AudioFrame::kVadUnknown),
- is_muted_(false),
- data_(NULL),
- channels_(NULL),
- split_channels_(NULL),
- mixed_channels_(NULL),
- mixed_low_pass_channels_(NULL),
- low_pass_reference_channels_(NULL) {
- if (max_num_channels_ > 1) {
- channels_.reset(new AudioChannel[max_num_channels_]);
- mixed_channels_.reset(new AudioChannel[max_num_channels_]);
- mixed_low_pass_channels_.reset(new AudioChannel[max_num_channels_]);
+ keyboard_data_(NULL),
+ channels_(new IFChannelBuffer(proc_samples_per_channel_,
+ num_proc_channels_)) {
+ assert(input_samples_per_channel_ > 0);
+ assert(proc_samples_per_channel_ > 0);
+ assert(output_samples_per_channel_ > 0);
+ assert(num_input_channels_ > 0 && num_input_channels_ <= 2);
+ assert(num_proc_channels_ <= num_input_channels);
+
+ if (num_input_channels_ == 2 && num_proc_channels_ == 1) {
+ input_buffer_.reset(new ChannelBuffer<float>(input_samples_per_channel_,
+ num_proc_channels_));
+ }
+
+ if (input_samples_per_channel_ != proc_samples_per_channel_ ||
+ output_samples_per_channel_ != proc_samples_per_channel_) {
+ // Create an intermediate buffer for resampling.
+ process_buffer_.reset(new ChannelBuffer<float>(proc_samples_per_channel_,
+ num_proc_channels_));
+ }
+
+ if (input_samples_per_channel_ != proc_samples_per_channel_) {
+ input_resamplers_.reserve(num_proc_channels_);
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ input_resamplers_.push_back(
+ new PushSincResampler(input_samples_per_channel_,
+ proc_samples_per_channel_));
+ }
}
- low_pass_reference_channels_.reset(new AudioChannel[max_num_channels_]);
- if (samples_per_channel_ == kSamplesPer32kHzChannel) {
- split_channels_.reset(new SplitAudioChannel[max_num_channels_]);
+ if (output_samples_per_channel_ != proc_samples_per_channel_) {
+ output_resamplers_.reserve(num_proc_channels_);
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ output_resamplers_.push_back(
+ new PushSincResampler(proc_samples_per_channel_,
+ output_samples_per_channel_));
+ }
+ }
+
+ if (proc_samples_per_channel_ == kSamplesPer32kHzChannel) {
samples_per_split_channel_ = kSamplesPer16kHzChannel;
+ split_channels_.reset(new SplitChannelBuffer(samples_per_split_channel_,
+ num_proc_channels_));
+ filter_states_.reset(new SplitFilterStates[num_proc_channels_]);
}
}
AudioBuffer::~AudioBuffer() {}
-int16_t* AudioBuffer::data(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- if (data_ != NULL) {
- return data_;
+void AudioBuffer::CopyFrom(const float* const* data,
+ int samples_per_channel,
+ AudioProcessing::ChannelLayout layout) {
+ assert(samples_per_channel == input_samples_per_channel_);
+ assert(ChannelsFromLayout(layout) == num_input_channels_);
+ InitForNewData();
+
+ if (HasKeyboardChannel(layout)) {
+ keyboard_data_ = data[KeyboardChannelIndex(layout)];
}
- return channels_[channel].data;
-}
+ // Downmix.
+ const float* const* data_ptr = data;
+ if (num_input_channels_ == 2 && num_proc_channels_ == 1) {
+ StereoToMono(data[0],
+ data[1],
+ input_buffer_->channel(0),
+ input_samples_per_channel_);
+ data_ptr = input_buffer_->channels();
+ }
-int16_t* AudioBuffer::low_pass_split_data(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- if (split_channels_.get() == NULL) {
- return data(channel);
+ // Resample.
+ if (input_samples_per_channel_ != proc_samples_per_channel_) {
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ input_resamplers_[i]->Resample(data_ptr[i],
+ input_samples_per_channel_,
+ process_buffer_->channel(i),
+ proc_samples_per_channel_);
+ }
+ data_ptr = process_buffer_->channels();
}
- return split_channels_[channel].low_pass_data;
+ // Convert to int16.
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ ScaleAndRoundToInt16(data_ptr[i], proc_samples_per_channel_,
+ channels_->ibuf()->channel(i));
+ }
}
-int16_t* AudioBuffer::high_pass_split_data(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- if (split_channels_.get() == NULL) {
- return NULL;
+void AudioBuffer::CopyTo(int samples_per_channel,
+ AudioProcessing::ChannelLayout layout,
+ float* const* data) {
+ assert(samples_per_channel == output_samples_per_channel_);
+ assert(ChannelsFromLayout(layout) == num_proc_channels_);
+
+ // Convert to float.
+ float* const* data_ptr = data;
+ if (output_samples_per_channel_ != proc_samples_per_channel_) {
+ // Convert to an intermediate buffer for subsequent resampling.
+ data_ptr = process_buffer_->channels();
+ }
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ ScaleToFloat(channels_->ibuf()->channel(i),
+ proc_samples_per_channel_,
+ data_ptr[i]);
+ }
+
+ // Resample.
+ if (output_samples_per_channel_ != proc_samples_per_channel_) {
+ for (int i = 0; i < num_proc_channels_; ++i) {
+ output_resamplers_[i]->Resample(data_ptr[i],
+ proc_samples_per_channel_,
+ data[i],
+ output_samples_per_channel_);
+ }
}
+}
- return split_channels_[channel].high_pass_data;
+void AudioBuffer::InitForNewData() {
+ keyboard_data_ = NULL;
+ num_mixed_channels_ = 0;
+ num_mixed_low_pass_channels_ = 0;
+ reference_copied_ = false;
+ activity_ = AudioFrame::kVadUnknown;
}
-int16_t* AudioBuffer::mixed_data(int channel) const {
+const int16_t* AudioBuffer::data(int channel) const {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return channels_->ibuf()->channel(channel);
+}
+
+int16_t* AudioBuffer::data(int channel) {
+ const AudioBuffer* t = this;
+ return const_cast<int16_t*>(t->data(channel));
+}
+
+float* AudioBuffer::data_f(int channel) {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return channels_->fbuf()->channel(channel);
+}
+
+const int16_t* AudioBuffer::low_pass_split_data(int channel) const {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return split_channels_.get() ? split_channels_->low_channel(channel)
+ : data(channel);
+}
+
+int16_t* AudioBuffer::low_pass_split_data(int channel) {
+ const AudioBuffer* t = this;
+ return const_cast<int16_t*>(t->low_pass_split_data(channel));
+}
+
+float* AudioBuffer::low_pass_split_data_f(int channel) {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return split_channels_.get() ? split_channels_->low_channel_f(channel)
+ : data_f(channel);
+}
+
+const int16_t* AudioBuffer::high_pass_split_data(int channel) const {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return split_channels_.get() ? split_channels_->high_channel(channel) : NULL;
+}
+
+int16_t* AudioBuffer::high_pass_split_data(int channel) {
+ const AudioBuffer* t = this;
+ return const_cast<int16_t*>(t->high_pass_split_data(channel));
+}
+
+float* AudioBuffer::high_pass_split_data_f(int channel) {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return split_channels_.get() ? split_channels_->high_channel_f(channel)
+ : NULL;
+}
+
+const int16_t* AudioBuffer::mixed_data(int channel) const {
assert(channel >= 0 && channel < num_mixed_channels_);
- return mixed_channels_[channel].data;
+ return mixed_channels_->channel(channel);
}
-int16_t* AudioBuffer::mixed_low_pass_data(int channel) const {
+const int16_t* AudioBuffer::mixed_low_pass_data(int channel) const {
assert(channel >= 0 && channel < num_mixed_low_pass_channels_);
- return mixed_low_pass_channels_[channel].data;
+ return mixed_low_pass_channels_->channel(channel);
}
-int16_t* AudioBuffer::low_pass_reference(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
+const int16_t* AudioBuffer::low_pass_reference(int channel) const {
+ assert(channel >= 0 && channel < num_proc_channels_);
if (!reference_copied_) {
return NULL;
}
- return low_pass_reference_channels_[channel].data;
-}
-
-int32_t* AudioBuffer::analysis_filter_state1(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- return split_channels_[channel].analysis_filter_state1;
-}
-
-int32_t* AudioBuffer::analysis_filter_state2(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- return split_channels_[channel].analysis_filter_state2;
+ return low_pass_reference_channels_->channel(channel);
}
-int32_t* AudioBuffer::synthesis_filter_state1(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- return split_channels_[channel].synthesis_filter_state1;
+const float* AudioBuffer::keyboard_data() const {
+ return keyboard_data_;
}
-int32_t* AudioBuffer::synthesis_filter_state2(int channel) const {
- assert(channel >= 0 && channel < num_channels_);
- return split_channels_[channel].synthesis_filter_state2;
+SplitFilterStates* AudioBuffer::filter_states(int channel) {
+ assert(channel >= 0 && channel < num_proc_channels_);
+ return &filter_states_[channel];
}
void AudioBuffer::set_activity(AudioFrame::VADActivity activity) {
@@ -170,126 +370,96 @@ AudioFrame::VADActivity AudioBuffer::activity() const {
return activity_;
}
-bool AudioBuffer::is_muted() const {
- return is_muted_;
-}
-
int AudioBuffer::num_channels() const {
- return num_channels_;
+ return num_proc_channels_;
}
int AudioBuffer::samples_per_channel() const {
- return samples_per_channel_;
+ return proc_samples_per_channel_;
}
int AudioBuffer::samples_per_split_channel() const {
return samples_per_split_channel_;
}
+int AudioBuffer::samples_per_keyboard_channel() const {
+ // We don't resample the keyboard channel.
+ return input_samples_per_channel_;
+}
+
// TODO(andrew): Do deinterleaving and mixing in one step?
void AudioBuffer::DeinterleaveFrom(AudioFrame* frame) {
- assert(frame->num_channels_ <= max_num_channels_);
- assert(frame->samples_per_channel_ == samples_per_channel_);
-
- num_channels_ = frame->num_channels_;
- data_was_mixed_ = false;
- num_mixed_channels_ = 0;
- num_mixed_low_pass_channels_ = 0;
- reference_copied_ = false;
+ assert(proc_samples_per_channel_ == input_samples_per_channel_);
+ assert(num_proc_channels_ == num_input_channels_);
+ assert(frame->num_channels_ == num_proc_channels_);
+ assert(frame->samples_per_channel_ == proc_samples_per_channel_);
+ InitForNewData();
activity_ = frame->vad_activity_;
- is_muted_ = false;
- if (frame->energy_ == 0) {
- is_muted_ = true;
- }
-
- if (num_channels_ == 1) {
- // We can get away with a pointer assignment in this case.
- data_ = frame->data_;
- return;
- }
int16_t* interleaved = frame->data_;
- for (int i = 0; i < num_channels_; i++) {
- int16_t* deinterleaved = channels_[i].data;
+ for (int i = 0; i < num_proc_channels_; i++) {
+ int16_t* deinterleaved = channels_->ibuf()->channel(i);
int interleaved_idx = i;
- for (int j = 0; j < samples_per_channel_; j++) {
+ for (int j = 0; j < proc_samples_per_channel_; j++) {
deinterleaved[j] = interleaved[interleaved_idx];
- interleaved_idx += num_channels_;
+ interleaved_idx += num_proc_channels_;
}
}
}
void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) const {
- assert(frame->num_channels_ == num_channels_);
- assert(frame->samples_per_channel_ == samples_per_channel_);
+ assert(proc_samples_per_channel_ == output_samples_per_channel_);
+ assert(num_proc_channels_ == num_input_channels_);
+ assert(frame->num_channels_ == num_proc_channels_);
+ assert(frame->samples_per_channel_ == proc_samples_per_channel_);
frame->vad_activity_ = activity_;
if (!data_changed) {
return;
}
- if (num_channels_ == 1) {
- if (data_was_mixed_) {
- memcpy(frame->data_,
- channels_[0].data,
- sizeof(int16_t) * samples_per_channel_);
- } else {
- // These should point to the same buffer in this case.
- assert(data_ == frame->data_);
- }
-
- return;
- }
-
int16_t* interleaved = frame->data_;
- for (int i = 0; i < num_channels_; i++) {
- int16_t* deinterleaved = channels_[i].data;
+ for (int i = 0; i < num_proc_channels_; i++) {
+ int16_t* deinterleaved = channels_->ibuf()->channel(i);
int interleaved_idx = i;
- for (int j = 0; j < samples_per_channel_; j++) {
+ for (int j = 0; j < proc_samples_per_channel_; j++) {
interleaved[interleaved_idx] = deinterleaved[j];
- interleaved_idx += num_channels_;
+ interleaved_idx += num_proc_channels_;
}
}
}
-// TODO(andrew): would be good to support the no-mix case with pointer
-// assignment.
-// TODO(andrew): handle mixing to multiple channels?
-void AudioBuffer::Mix(int num_mixed_channels) {
- // We currently only support the stereo to mono case.
- assert(num_channels_ == 2);
- assert(num_mixed_channels == 1);
-
- StereoToMono(channels_[0].data,
- channels_[1].data,
- channels_[0].data,
- samples_per_channel_);
-
- num_channels_ = num_mixed_channels;
- data_was_mixed_ = true;
-}
-
void AudioBuffer::CopyAndMix(int num_mixed_channels) {
// We currently only support the stereo to mono case.
- assert(num_channels_ == 2);
+ assert(num_proc_channels_ == 2);
assert(num_mixed_channels == 1);
+ if (!mixed_channels_.get()) {
+ mixed_channels_.reset(
+ new ChannelBuffer<int16_t>(proc_samples_per_channel_,
+ num_mixed_channels));
+ }
- StereoToMono(channels_[0].data,
- channels_[1].data,
- mixed_channels_[0].data,
- samples_per_channel_);
+ StereoToMono(channels_->ibuf()->channel(0),
+ channels_->ibuf()->channel(1),
+ mixed_channels_->channel(0),
+ proc_samples_per_channel_);
num_mixed_channels_ = num_mixed_channels;
}
void AudioBuffer::CopyAndMixLowPass(int num_mixed_channels) {
// We currently only support the stereo to mono case.
- assert(num_channels_ == 2);
+ assert(num_proc_channels_ == 2);
assert(num_mixed_channels == 1);
+ if (!mixed_low_pass_channels_.get()) {
+ mixed_low_pass_channels_.reset(
+ new ChannelBuffer<int16_t>(samples_per_split_channel_,
+ num_mixed_channels));
+ }
StereoToMono(low_pass_split_data(0),
low_pass_split_data(1),
- mixed_low_pass_channels_[0].data,
+ mixed_low_pass_channels_->channel(0),
samples_per_split_channel_);
num_mixed_low_pass_channels_ = num_mixed_channels;
@@ -297,10 +467,14 @@ void AudioBuffer::CopyAndMixLowPass(int num_mixed_channels) {
void AudioBuffer::CopyLowPassToReference() {
reference_copied_ = true;
- for (int i = 0; i < num_channels_; i++) {
- memcpy(low_pass_reference_channels_[i].data,
- low_pass_split_data(i),
- sizeof(int16_t) * samples_per_split_channel_);
+ if (!low_pass_reference_channels_.get()) {
+ low_pass_reference_channels_.reset(
+ new ChannelBuffer<int16_t>(samples_per_split_channel_,
+ num_proc_channels_));
+ }
+ for (int i = 0; i < num_proc_channels_; i++) {
+ low_pass_reference_channels_->CopyFrom(low_pass_split_data(i), i);
}
}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.h b/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.h
index 2638bef6058..67e4f485043 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_buffer.h
@@ -8,75 +8,124 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_BUFFER_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_BUFFER_H_
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AUDIO_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AUDIO_BUFFER_H_
+#include <vector>
+
+#include "webrtc/modules/audio_processing/common.h"
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/scoped_vector.h"
#include "webrtc/typedefs.h"
namespace webrtc {
-struct AudioChannel;
-struct SplitAudioChannel;
+class PushSincResampler;
+class SplitChannelBuffer;
+class IFChannelBuffer;
+
+struct SplitFilterStates {
+ SplitFilterStates() {
+ memset(analysis_filter_state1, 0, sizeof(analysis_filter_state1));
+ memset(analysis_filter_state2, 0, sizeof(analysis_filter_state2));
+ memset(synthesis_filter_state1, 0, sizeof(synthesis_filter_state1));
+ memset(synthesis_filter_state2, 0, sizeof(synthesis_filter_state2));
+ }
+
+ static const int kStateSize = 6;
+ int analysis_filter_state1[kStateSize];
+ int analysis_filter_state2[kStateSize];
+ int synthesis_filter_state1[kStateSize];
+ int synthesis_filter_state2[kStateSize];
+};
class AudioBuffer {
public:
- AudioBuffer(int max_num_channels, int samples_per_channel);
+ // TODO(ajm): Switch to take ChannelLayouts.
+ AudioBuffer(int input_samples_per_channel,
+ int num_input_channels,
+ int process_samples_per_channel,
+ int num_process_channels,
+ int output_samples_per_channel);
virtual ~AudioBuffer();
int num_channels() const;
int samples_per_channel() const;
int samples_per_split_channel() const;
+ int samples_per_keyboard_channel() const;
- int16_t* data(int channel) const;
- int16_t* low_pass_split_data(int channel) const;
- int16_t* high_pass_split_data(int channel) const;
- int16_t* mixed_data(int channel) const;
- int16_t* mixed_low_pass_data(int channel) const;
- int16_t* low_pass_reference(int channel) const;
+ int16_t* data(int channel);
+ const int16_t* data(int channel) const;
+ int16_t* low_pass_split_data(int channel);
+ const int16_t* low_pass_split_data(int channel) const;
+ int16_t* high_pass_split_data(int channel);
+ const int16_t* high_pass_split_data(int channel) const;
+ const int16_t* mixed_data(int channel) const;
+ const int16_t* mixed_low_pass_data(int channel) const;
+ const int16_t* low_pass_reference(int channel) const;
- int32_t* analysis_filter_state1(int channel) const;
- int32_t* analysis_filter_state2(int channel) const;
- int32_t* synthesis_filter_state1(int channel) const;
- int32_t* synthesis_filter_state2(int channel) const;
+ // Float versions of the accessors, with automatic conversion back and forth
+ // as necessary. The range of the numbers are the same as for int16_t.
+ float* data_f(int channel);
+ float* low_pass_split_data_f(int channel);
+ float* high_pass_split_data_f(int channel);
+
+ const float* keyboard_data() const;
+
+ SplitFilterStates* filter_states(int channel);
void set_activity(AudioFrame::VADActivity activity);
AudioFrame::VADActivity activity() const;
- bool is_muted() const;
-
+ // Use for int16 interleaved data.
void DeinterleaveFrom(AudioFrame* audioFrame);
void InterleaveTo(AudioFrame* audioFrame) const;
// If |data_changed| is false, only the non-audio data members will be copied
// to |frame|.
void InterleaveTo(AudioFrame* frame, bool data_changed) const;
- void Mix(int num_mixed_channels);
+
+ // Use for float deinterleaved data.
+ void CopyFrom(const float* const* data,
+ int samples_per_channel,
+ AudioProcessing::ChannelLayout layout);
+ void CopyTo(int samples_per_channel,
+ AudioProcessing::ChannelLayout layout,
+ float* const* data);
+
void CopyAndMix(int num_mixed_channels);
void CopyAndMixLowPass(int num_mixed_channels);
void CopyLowPassToReference();
private:
- const int max_num_channels_;
- int num_channels_;
+ // Called from DeinterleaveFrom() and CopyFrom().
+ void InitForNewData();
+
+ const int input_samples_per_channel_;
+ const int num_input_channels_;
+ const int proc_samples_per_channel_;
+ const int num_proc_channels_;
+ const int output_samples_per_channel_;
+ int samples_per_split_channel_;
int num_mixed_channels_;
int num_mixed_low_pass_channels_;
- // Whether the original data was replaced with mixed data.
- bool data_was_mixed_;
- const int samples_per_channel_;
- int samples_per_split_channel_;
bool reference_copied_;
AudioFrame::VADActivity activity_;
- bool is_muted_;
-
- int16_t* data_;
- scoped_array<AudioChannel> channels_;
- scoped_array<SplitAudioChannel> split_channels_;
- scoped_array<AudioChannel> mixed_channels_;
- // TODO(andrew): improve this, we don't need the full 32 kHz space here.
- scoped_array<AudioChannel> mixed_low_pass_channels_;
- scoped_array<AudioChannel> low_pass_reference_channels_;
+
+ const float* keyboard_data_;
+ scoped_ptr<IFChannelBuffer> channels_;
+ scoped_ptr<SplitChannelBuffer> split_channels_;
+ scoped_ptr<SplitFilterStates[]> filter_states_;
+ scoped_ptr<ChannelBuffer<int16_t> > mixed_channels_;
+ scoped_ptr<ChannelBuffer<int16_t> > mixed_low_pass_channels_;
+ scoped_ptr<ChannelBuffer<int16_t> > low_pass_reference_channels_;
+ scoped_ptr<ChannelBuffer<float> > input_buffer_;
+ scoped_ptr<ChannelBuffer<float> > process_buffer_;
+ ScopedVector<PushSincResampler> input_resamplers_;
+ ScopedVector<PushSincResampler> output_resamplers_;
};
+
} // namespace webrtc
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_BUFFER_H_
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AUDIO_BUFFER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi b/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
index 336b4eee75f..b1d18c5b06a 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing.gypi
@@ -12,6 +12,7 @@
'<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
+ 'shared_generated_dir': '<(SHARED_INTERMEDIATE_DIR)/audio_processing/asm_offsets',
},
'targets': [
{
@@ -53,9 +54,9 @@
'audio_buffer.h',
'audio_processing_impl.cc',
'audio_processing_impl.h',
+ 'common.h',
'echo_cancellation_impl.cc',
'echo_cancellation_impl.h',
- 'echo_cancellation_impl_wrapper.h',
'echo_control_mobile_impl.cc',
'echo_control_mobile_impl.h',
'gain_control_impl.cc',
@@ -67,10 +68,12 @@
'level_estimator_impl.h',
'noise_suppression_impl.cc',
'noise_suppression_impl.h',
- 'splitting_filter.cc',
- 'splitting_filter.h',
'processing_component.cc',
'processing_component.h',
+ 'rms_level.cc',
+ 'rms_level.h',
+ 'typing_detection.cc',
+ 'typing_detection.h',
'utility/delay_estimator.c',
'utility/delay_estimator.h',
'utility/delay_estimator_internal.h',
@@ -103,6 +106,17 @@
'ns/nsx_core.h',
'ns/nsx_defines.h',
],
+ 'conditions': [
+ ['target_arch=="mipsel"', {
+ 'sources': [
+ 'ns/nsx_core_mips.c',
+ ],
+ }, {
+ 'sources': [
+ 'ns/nsx_core_c.c',
+ ],
+ }],
+ ],
}, {
'defines': ['WEBRTC_NS_FLOAT'],
'sources': [
@@ -124,6 +138,14 @@
'sources': [
'aecm/aecm_core_mips.c',
],
+ 'conditions': [
+ ['mips_fpu==1', {
+ 'sources': [
+ 'aec/aec_core_mips.c',
+ 'aec/aec_rdft_mips.c',
+ ],
+ }],
+ ],
}, {
'sources': [
'aecm/aecm_core_c.c',
@@ -177,18 +199,22 @@
'<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
],
'sources': [
+ 'aec/aec_core_neon.c',
'aecm/aecm_core_neon.c',
'ns/nsx_core_neon.c',
],
'conditions': [
['OS=="android" or OS=="ios"', {
'dependencies': [
- 'audio_processing_offsets',
+ '<(gen_core_neon_offsets_gyp):*',
],
'sources': [
'aecm/aecm_core_neon.S',
'ns/nsx_core_neon.S',
],
+ 'include_dirs': [
+ '<(shared_generated_dir)',
+ ],
'sources!': [
'aecm/aecm_core_neon.c',
'ns/nsx_core_neon.c',
@@ -197,22 +223,6 @@
}],
],
}],
- 'conditions': [
- ['OS=="android" or OS=="ios"', {
- 'targets': [{
- 'target_name': 'audio_processing_offsets',
- 'type': 'none',
- 'sources': [
- 'aecm/aecm_core_neon_offsets.c',
- 'ns/nsx_core_neon_offsets.c',
- ],
- 'variables': {
- 'asm_header_dir': 'asm_offsets',
- },
- 'includes': ['../../build/generate_asm_header.gypi',],
- }],
- }],
- ],
}],
],
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
index 4d36ff7e7b4..de387edb2f5 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.cc
@@ -12,17 +12,20 @@
#include <assert.h>
+#include "webrtc/common_audio/include/audio_util.h"
+#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h"
+#include "webrtc/modules/audio_processing/common.h"
+#include "webrtc/modules/audio_processing/echo_cancellation_impl.h"
#include "webrtc/modules/audio_processing/echo_control_mobile_impl.h"
#include "webrtc/modules/audio_processing/gain_control_impl.h"
#include "webrtc/modules/audio_processing/high_pass_filter_impl.h"
#include "webrtc/modules/audio_processing/level_estimator_impl.h"
#include "webrtc/modules/audio_processing/noise_suppression_impl.h"
#include "webrtc/modules/audio_processing/processing_component.h"
-#include "webrtc/modules/audio_processing/splitting_filter.h"
#include "webrtc/modules/audio_processing/voice_detection_impl.h"
#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/compile_assert.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/file_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
@@ -36,9 +39,30 @@
#endif
#endif // WEBRTC_AUDIOPROC_DEBUG_DUMP
+#define RETURN_ON_ERR(expr) \
+ do { \
+ int err = expr; \
+ if (err != kNoError) { \
+ return err; \
+ } \
+ } while (0)
+
namespace webrtc {
+
+// Throughout webrtc, it's assumed that success is represented by zero.
+COMPILE_ASSERT(AudioProcessing::kNoError == 0, no_error_must_be_zero);
+
AudioProcessing* AudioProcessing::Create(int id) {
- AudioProcessingImpl* apm = new AudioProcessingImpl(id);
+ return Create();
+}
+
+AudioProcessing* AudioProcessing::Create() {
+ Config config;
+ return Create(config);
+}
+
+AudioProcessing* AudioProcessing::Create(const Config& config) {
+ AudioProcessingImpl* apm = new AudioProcessingImpl(config);
if (apm->Initialize() != kNoError) {
delete apm;
apm = NULL;
@@ -47,12 +71,8 @@ AudioProcessing* AudioProcessing::Create(int id) {
return apm;
}
-int32_t AudioProcessing::TimeUntilNextProcess() { return -1; }
-int32_t AudioProcessing::Process() { return -1; }
-
-AudioProcessingImpl::AudioProcessingImpl(int id)
- : id_(id),
- echo_cancellation_(NULL),
+AudioProcessingImpl::AudioProcessingImpl(const Config& config)
+ : echo_cancellation_(NULL),
echo_control_mobile_(NULL),
gain_control_(NULL),
high_pass_filter_(NULL),
@@ -60,41 +80,43 @@ AudioProcessingImpl::AudioProcessingImpl(int id)
noise_suppression_(NULL),
voice_detection_(NULL),
crit_(CriticalSectionWrapper::CreateCriticalSection()),
- render_audio_(NULL),
- capture_audio_(NULL),
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
debug_file_(FileWrapper::Create()),
event_msg_(new audioproc::Event()),
#endif
- sample_rate_hz_(kSampleRate16kHz),
- split_sample_rate_hz_(kSampleRate16kHz),
- samples_per_channel_(sample_rate_hz_ / 100),
+ fwd_in_format_(kSampleRate16kHz, 1),
+ fwd_proc_format_(kSampleRate16kHz, 1),
+ fwd_out_format_(kSampleRate16kHz),
+ rev_in_format_(kSampleRate16kHz, 1),
+ rev_proc_format_(kSampleRate16kHz, 1),
+ split_rate_(kSampleRate16kHz),
stream_delay_ms_(0),
delay_offset_ms_(0),
was_stream_delay_set_(false),
- num_reverse_channels_(1),
- num_input_channels_(1),
- num_output_channels_(1) {
- echo_cancellation_ = EchoCancellationImplWrapper::Create(this);
+ output_will_be_muted_(false),
+ key_pressed_(false) {
+ echo_cancellation_ = new EchoCancellationImpl(this, crit_);
component_list_.push_back(echo_cancellation_);
- echo_control_mobile_ = new EchoControlMobileImpl(this);
+ echo_control_mobile_ = new EchoControlMobileImpl(this, crit_);
component_list_.push_back(echo_control_mobile_);
- gain_control_ = new GainControlImpl(this);
+ gain_control_ = new GainControlImpl(this, crit_);
component_list_.push_back(gain_control_);
- high_pass_filter_ = new HighPassFilterImpl(this);
+ high_pass_filter_ = new HighPassFilterImpl(this, crit_);
component_list_.push_back(high_pass_filter_);
- level_estimator_ = new LevelEstimatorImpl(this);
+ level_estimator_ = new LevelEstimatorImpl(this, crit_);
component_list_.push_back(level_estimator_);
- noise_suppression_ = new NoiseSuppressionImpl(this);
+ noise_suppression_ = new NoiseSuppressionImpl(this, crit_);
component_list_.push_back(noise_suppression_);
- voice_detection_ = new VoiceDetectionImpl(this);
+ voice_detection_ = new VoiceDetectionImpl(this, crit_);
component_list_.push_back(voice_detection_);
+
+ SetExtraOptions(config);
}
AudioProcessingImpl::~AudioProcessingImpl() {
@@ -112,52 +134,52 @@ AudioProcessingImpl::~AudioProcessingImpl() {
debug_file_->CloseFile();
}
#endif
-
- if (render_audio_) {
- delete render_audio_;
- render_audio_ = NULL;
- }
-
- if (capture_audio_) {
- delete capture_audio_;
- capture_audio_ = NULL;
- }
}
-
delete crit_;
crit_ = NULL;
}
-CriticalSectionWrapper* AudioProcessingImpl::crit() const {
- return crit_;
-}
-
-int AudioProcessingImpl::split_sample_rate_hz() const {
- return split_sample_rate_hz_;
-}
-
int AudioProcessingImpl::Initialize() {
CriticalSectionScoped crit_scoped(crit_);
return InitializeLocked();
}
-int AudioProcessingImpl::InitializeLocked() {
- if (render_audio_ != NULL) {
- delete render_audio_;
- render_audio_ = NULL;
- }
-
- if (capture_audio_ != NULL) {
- delete capture_audio_;
- capture_audio_ = NULL;
- }
-
- render_audio_ = new AudioBuffer(num_reverse_channels_,
- samples_per_channel_);
- capture_audio_ = new AudioBuffer(num_input_channels_,
- samples_per_channel_);
+int AudioProcessingImpl::set_sample_rate_hz(int rate) {
+ CriticalSectionScoped crit_scoped(crit_);
+ return InitializeLocked(rate,
+ rate,
+ rev_in_format_.rate(),
+ fwd_in_format_.num_channels(),
+ fwd_proc_format_.num_channels(),
+ rev_in_format_.num_channels());
+}
+
+int AudioProcessingImpl::Initialize(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ ChannelLayout input_layout,
+ ChannelLayout output_layout,
+ ChannelLayout reverse_layout) {
+ CriticalSectionScoped crit_scoped(crit_);
+ return InitializeLocked(input_sample_rate_hz,
+ output_sample_rate_hz,
+ reverse_sample_rate_hz,
+ ChannelsFromLayout(input_layout),
+ ChannelsFromLayout(output_layout),
+ ChannelsFromLayout(reverse_layout));
+}
- was_stream_delay_set_ = false;
+int AudioProcessingImpl::InitializeLocked() {
+ render_audio_.reset(new AudioBuffer(rev_in_format_.samples_per_channel(),
+ rev_in_format_.num_channels(),
+ rev_proc_format_.samples_per_channel(),
+ rev_proc_format_.num_channels(),
+ rev_proc_format_.samples_per_channel()));
+ capture_audio_.reset(new AudioBuffer(fwd_in_format_.samples_per_channel(),
+ fwd_in_format_.num_channels(),
+ fwd_proc_format_.samples_per_channel(),
+ fwd_proc_format_.num_channels(),
+ fwd_out_format_.samples_per_channel()));
// Initialize all components.
std::list<ProcessingComponent*>::iterator it;
@@ -180,115 +202,228 @@ int AudioProcessingImpl::InitializeLocked() {
return kNoError;
}
-void AudioProcessingImpl::SetExtraOptions(const Config& config) {
- std::list<ProcessingComponent*>::iterator it;
- for (it = component_list_.begin(); it != component_list_.end(); ++it)
- (*it)->SetExtraOptions(config);
-}
+int AudioProcessingImpl::InitializeLocked(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ int num_input_channels,
+ int num_output_channels,
+ int num_reverse_channels) {
+ if (input_sample_rate_hz <= 0 ||
+ output_sample_rate_hz <= 0 ||
+ reverse_sample_rate_hz <= 0) {
+ return kBadSampleRateError;
+ }
+ if (num_output_channels > num_input_channels) {
+ return kBadNumberChannelsError;
+ }
+ // Only mono and stereo supported currently.
+ if (num_input_channels > 2 || num_input_channels < 1 ||
+ num_output_channels > 2 || num_output_channels < 1 ||
+ num_reverse_channels > 2 || num_reverse_channels < 1) {
+ return kBadNumberChannelsError;
+ }
-int AudioProcessingImpl::EnableExperimentalNs(bool enable) {
- return kNoError;
-}
+ fwd_in_format_.set(input_sample_rate_hz, num_input_channels);
+ fwd_out_format_.set(output_sample_rate_hz);
+ rev_in_format_.set(reverse_sample_rate_hz, num_reverse_channels);
-int AudioProcessingImpl::set_sample_rate_hz(int rate) {
- CriticalSectionScoped crit_scoped(crit_);
- if (rate == sample_rate_hz_) {
- return kNoError;
+ // We process at the closest native rate >= min(input rate, output rate)...
+ int min_proc_rate = std::min(fwd_in_format_.rate(), fwd_out_format_.rate());
+ int fwd_proc_rate;
+ if (min_proc_rate > kSampleRate16kHz) {
+ fwd_proc_rate = kSampleRate32kHz;
+ } else if (min_proc_rate > kSampleRate8kHz) {
+ fwd_proc_rate = kSampleRate16kHz;
+ } else {
+ fwd_proc_rate = kSampleRate8kHz;
}
- if (rate != kSampleRate8kHz &&
- rate != kSampleRate16kHz &&
- rate != kSampleRate32kHz) {
- return kBadParameterError;
+ // ...with one exception.
+ if (echo_control_mobile_->is_enabled() && min_proc_rate > kSampleRate16kHz) {
+ fwd_proc_rate = kSampleRate16kHz;
}
- if (echo_control_mobile_->is_enabled() && rate > kSampleRate16kHz) {
- LOG(LS_ERROR) << "AECM only supports 16 kHz or lower sample rates";
- return kUnsupportedComponentError;
+
+ fwd_proc_format_.set(fwd_proc_rate, num_output_channels);
+
+ // We normally process the reverse stream at 16 kHz. Unless...
+ int rev_proc_rate = kSampleRate16kHz;
+ if (fwd_proc_format_.rate() == kSampleRate8kHz) {
+ // ...the forward stream is at 8 kHz.
+ rev_proc_rate = kSampleRate8kHz;
+ } else {
+ if (rev_in_format_.rate() == kSampleRate32kHz) {
+ // ...or the input is at 32 kHz, in which case we use the splitting
+ // filter rather than the resampler.
+ rev_proc_rate = kSampleRate32kHz;
+ }
}
- sample_rate_hz_ = rate;
- samples_per_channel_ = rate / 100;
+ // TODO(ajm): Enable this.
+ // Always downmix the reverse stream to mono for analysis.
+ //rev_proc_format_.set(rev_proc_rate, 1);
+ rev_proc_format_.set(rev_proc_rate, rev_in_format_.num_channels());
- if (sample_rate_hz_ == kSampleRate32kHz) {
- split_sample_rate_hz_ = kSampleRate16kHz;
+ if (fwd_proc_format_.rate() == kSampleRate32kHz) {
+ split_rate_ = kSampleRate16kHz;
} else {
- split_sample_rate_hz_ = sample_rate_hz_;
+ split_rate_ = fwd_proc_format_.rate();
}
return InitializeLocked();
}
-int AudioProcessingImpl::sample_rate_hz() const {
+// Calls InitializeLocked() if any of the audio parameters have changed from
+// their current values.
+int AudioProcessingImpl::MaybeInitializeLocked(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ int num_input_channels,
+ int num_output_channels,
+ int num_reverse_channels) {
+ if (input_sample_rate_hz == fwd_in_format_.rate() &&
+ output_sample_rate_hz == fwd_out_format_.rate() &&
+ reverse_sample_rate_hz == rev_in_format_.rate() &&
+ num_input_channels == fwd_in_format_.num_channels() &&
+ num_output_channels == fwd_proc_format_.num_channels() &&
+ num_reverse_channels == rev_in_format_.num_channels()) {
+ return kNoError;
+ }
+
+ return InitializeLocked(input_sample_rate_hz,
+ output_sample_rate_hz,
+ reverse_sample_rate_hz,
+ num_input_channels,
+ num_output_channels,
+ num_reverse_channels);
+}
+
+void AudioProcessingImpl::SetExtraOptions(const Config& config) {
CriticalSectionScoped crit_scoped(crit_);
- return sample_rate_hz_;
+ std::list<ProcessingComponent*>::iterator it;
+ for (it = component_list_.begin(); it != component_list_.end(); ++it)
+ (*it)->SetExtraOptions(config);
+}
+
+int AudioProcessingImpl::EnableExperimentalNs(bool enable) {
+ return kNoError;
}
-int AudioProcessingImpl::set_num_reverse_channels(int channels) {
+int AudioProcessingImpl::input_sample_rate_hz() const {
CriticalSectionScoped crit_scoped(crit_);
- if (channels == num_reverse_channels_) {
- return kNoError;
- }
- // Only stereo supported currently.
- if (channels > 2 || channels < 1) {
- return kBadParameterError;
- }
+ return fwd_in_format_.rate();
+}
- num_reverse_channels_ = channels;
+int AudioProcessingImpl::sample_rate_hz() const {
+ CriticalSectionScoped crit_scoped(crit_);
+ return fwd_in_format_.rate();
+}
- return InitializeLocked();
+int AudioProcessingImpl::proc_sample_rate_hz() const {
+ return fwd_proc_format_.rate();
+}
+
+int AudioProcessingImpl::proc_split_sample_rate_hz() const {
+ return split_rate_;
}
int AudioProcessingImpl::num_reverse_channels() const {
- return num_reverse_channels_;
+ return rev_proc_format_.num_channels();
+}
+
+int AudioProcessingImpl::num_input_channels() const {
+ return fwd_in_format_.num_channels();
+}
+
+int AudioProcessingImpl::num_output_channels() const {
+ return fwd_proc_format_.num_channels();
+}
+
+void AudioProcessingImpl::set_output_will_be_muted(bool muted) {
+ output_will_be_muted_ = muted;
}
-int AudioProcessingImpl::set_num_channels(
- int input_channels,
- int output_channels) {
+bool AudioProcessingImpl::output_will_be_muted() const {
+ return output_will_be_muted_;
+}
+
+int AudioProcessingImpl::ProcessStream(const float* const* src,
+ int samples_per_channel,
+ int input_sample_rate_hz,
+ ChannelLayout input_layout,
+ int output_sample_rate_hz,
+ ChannelLayout output_layout,
+ float* const* dest) {
CriticalSectionScoped crit_scoped(crit_);
- if (input_channels == num_input_channels_ &&
- output_channels == num_output_channels_) {
- return kNoError;
- }
- if (output_channels > input_channels) {
- return kBadParameterError;
+ if (!src || !dest) {
+ return kNullPointerError;
}
- // Only stereo supported currently.
- if (input_channels > 2 || input_channels < 1 ||
- output_channels > 2 || output_channels < 1) {
- return kBadParameterError;
+
+ RETURN_ON_ERR(MaybeInitializeLocked(input_sample_rate_hz,
+ output_sample_rate_hz,
+ rev_in_format_.rate(),
+ ChannelsFromLayout(input_layout),
+ ChannelsFromLayout(output_layout),
+ rev_in_format_.num_channels()));
+ if (samples_per_channel != fwd_in_format_.samples_per_channel()) {
+ return kBadDataLengthError;
}
- num_input_channels_ = input_channels;
- num_output_channels_ = output_channels;
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+ if (debug_file_->Open()) {
+ event_msg_->set_type(audioproc::Event::STREAM);
+ audioproc::Stream* msg = event_msg_->mutable_stream();
+ const size_t channel_size = sizeof(float) * samples_per_channel;
+ for (int i = 0; i < fwd_in_format_.num_channels(); ++i)
+ msg->add_input_channel(src[i], channel_size);
+ }
+#endif
- return InitializeLocked();
-}
+ capture_audio_->CopyFrom(src, samples_per_channel, input_layout);
+ RETURN_ON_ERR(ProcessStreamLocked());
+ if (output_copy_needed(is_data_processed())) {
+ capture_audio_->CopyTo(fwd_out_format_.samples_per_channel(),
+ output_layout,
+ dest);
+ }
-int AudioProcessingImpl::num_input_channels() const {
- return num_input_channels_;
-}
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+ if (debug_file_->Open()) {
+ audioproc::Stream* msg = event_msg_->mutable_stream();
+ const size_t channel_size = sizeof(float) * samples_per_channel;
+ for (int i = 0; i < fwd_proc_format_.num_channels(); ++i)
+ msg->add_output_channel(dest[i], channel_size);
+ RETURN_ON_ERR(WriteMessageToDebugFile());
+ }
+#endif
-int AudioProcessingImpl::num_output_channels() const {
- return num_output_channels_;
+ return kNoError;
}
int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
CriticalSectionScoped crit_scoped(crit_);
- int err = kNoError;
-
- if (frame == NULL) {
+ if (!frame) {
return kNullPointerError;
}
-
- if (frame->sample_rate_hz_ != sample_rate_hz_) {
+ // Must be a native rate.
+ if (frame->sample_rate_hz_ != kSampleRate8kHz &&
+ frame->sample_rate_hz_ != kSampleRate16kHz &&
+ frame->sample_rate_hz_ != kSampleRate32kHz) {
return kBadSampleRateError;
}
-
- if (frame->num_channels_ != num_input_channels_) {
- return kBadNumberChannelsError;
+ if (echo_control_mobile_->is_enabled() &&
+ frame->sample_rate_hz_ > kSampleRate16kHz) {
+ LOG(LS_ERROR) << "AECM only supports 16 or 8 kHz sample rates";
+ return kUnsupportedComponentError;
}
- if (frame->samples_per_channel_ != samples_per_channel_) {
+ // TODO(ajm): The input and output rates and channels are currently
+ // constrained to be identical in the int16 interface.
+ RETURN_ON_ERR(MaybeInitializeLocked(frame->sample_rate_hz_,
+ frame->sample_rate_hz_,
+ rev_in_format_.rate(),
+ frame->num_channels_,
+ frame->num_channels_,
+ rev_in_format_.num_channels()));
+ if (frame->samples_per_channel_ != fwd_in_format_.samples_per_channel()) {
return kBadDataLengthError;
}
@@ -300,126 +435,142 @@ int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
frame->samples_per_channel_ *
frame->num_channels_;
msg->set_input_data(frame->data_, data_size);
- msg->set_delay(stream_delay_ms_);
- msg->set_drift(echo_cancellation_->stream_drift_samples());
- msg->set_level(gain_control_->stream_analog_level());
}
#endif
capture_audio_->DeinterleaveFrom(frame);
+ RETURN_ON_ERR(ProcessStreamLocked());
+ capture_audio_->InterleaveTo(frame, output_copy_needed(is_data_processed()));
- // TODO(ajm): experiment with mixing and AEC placement.
- if (num_output_channels_ < num_input_channels_) {
- capture_audio_->Mix(num_output_channels_);
- frame->num_channels_ = num_output_channels_;
- }
-
- bool data_processed = is_data_processed();
- if (analysis_needed(data_processed)) {
- for (int i = 0; i < num_output_channels_; i++) {
- // Split into a low and high band.
- SplittingFilterAnalysis(capture_audio_->data(i),
- capture_audio_->low_pass_split_data(i),
- capture_audio_->high_pass_split_data(i),
- capture_audio_->analysis_filter_state1(i),
- capture_audio_->analysis_filter_state2(i));
- }
- }
-
- err = high_pass_filter_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
- }
-
- err = gain_control_->AnalyzeCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+ if (debug_file_->Open()) {
+ audioproc::Stream* msg = event_msg_->mutable_stream();
+ const size_t data_size = sizeof(int16_t) *
+ frame->samples_per_channel_ *
+ frame->num_channels_;
+ msg->set_output_data(frame->data_, data_size);
+ RETURN_ON_ERR(WriteMessageToDebugFile());
}
+#endif
- err = echo_cancellation_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
- }
+ return kNoError;
+}
- if (echo_control_mobile_->is_enabled() &&
- noise_suppression_->is_enabled()) {
- capture_audio_->CopyLowPassToReference();
- }
- err = noise_suppression_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
+int AudioProcessingImpl::ProcessStreamLocked() {
+#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
+ if (debug_file_->Open()) {
+ audioproc::Stream* msg = event_msg_->mutable_stream();
+ msg->set_delay(stream_delay_ms_);
+ msg->set_drift(echo_cancellation_->stream_drift_samples());
+ msg->set_level(gain_control_->stream_analog_level());
+ msg->set_keypress(key_pressed_);
}
+#endif
- err = echo_control_mobile_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
+ AudioBuffer* ca = capture_audio_.get(); // For brevity.
+ bool data_processed = is_data_processed();
+ if (analysis_needed(data_processed)) {
+ for (int i = 0; i < fwd_proc_format_.num_channels(); i++) {
+ // Split into a low and high band.
+ WebRtcSpl_AnalysisQMF(ca->data(i),
+ ca->samples_per_channel(),
+ ca->low_pass_split_data(i),
+ ca->high_pass_split_data(i),
+ ca->filter_states(i)->analysis_filter_state1,
+ ca->filter_states(i)->analysis_filter_state2);
+ }
}
- err = voice_detection_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
- }
+ RETURN_ON_ERR(high_pass_filter_->ProcessCaptureAudio(ca));
+ RETURN_ON_ERR(gain_control_->AnalyzeCaptureAudio(ca));
+ RETURN_ON_ERR(echo_cancellation_->ProcessCaptureAudio(ca));
- err = gain_control_->ProcessCaptureAudio(capture_audio_);
- if (err != kNoError) {
- return err;
+ if (echo_control_mobile_->is_enabled() && noise_suppression_->is_enabled()) {
+ ca->CopyLowPassToReference();
}
+ RETURN_ON_ERR(noise_suppression_->ProcessCaptureAudio(ca));
+ RETURN_ON_ERR(echo_control_mobile_->ProcessCaptureAudio(ca));
+ RETURN_ON_ERR(voice_detection_->ProcessCaptureAudio(ca));
+ RETURN_ON_ERR(gain_control_->ProcessCaptureAudio(ca));
if (synthesis_needed(data_processed)) {
- for (int i = 0; i < num_output_channels_; i++) {
+ for (int i = 0; i < fwd_proc_format_.num_channels(); i++) {
// Recombine low and high bands.
- SplittingFilterSynthesis(capture_audio_->low_pass_split_data(i),
- capture_audio_->high_pass_split_data(i),
- capture_audio_->data(i),
- capture_audio_->synthesis_filter_state1(i),
- capture_audio_->synthesis_filter_state2(i));
+ WebRtcSpl_SynthesisQMF(ca->low_pass_split_data(i),
+ ca->high_pass_split_data(i),
+ ca->samples_per_split_channel(),
+ ca->data(i),
+ ca->filter_states(i)->synthesis_filter_state1,
+ ca->filter_states(i)->synthesis_filter_state2);
}
}
// The level estimator operates on the recombined data.
- err = level_estimator_->ProcessStream(capture_audio_);
- if (err != kNoError) {
- return err;
+ RETURN_ON_ERR(level_estimator_->ProcessStream(ca));
+
+ was_stream_delay_set_ = false;
+ return kNoError;
+}
+
+int AudioProcessingImpl::AnalyzeReverseStream(const float* const* data,
+ int samples_per_channel,
+ int sample_rate_hz,
+ ChannelLayout layout) {
+ CriticalSectionScoped crit_scoped(crit_);
+ if (data == NULL) {
+ return kNullPointerError;
}
- capture_audio_->InterleaveTo(frame, interleave_needed(data_processed));
+ const int num_channels = ChannelsFromLayout(layout);
+ RETURN_ON_ERR(MaybeInitializeLocked(fwd_in_format_.rate(),
+ fwd_out_format_.rate(),
+ sample_rate_hz,
+ fwd_in_format_.num_channels(),
+ fwd_proc_format_.num_channels(),
+ num_channels));
+ if (samples_per_channel != rev_in_format_.samples_per_channel()) {
+ return kBadDataLengthError;
+ }
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
if (debug_file_->Open()) {
- audioproc::Stream* msg = event_msg_->mutable_stream();
- const size_t data_size = sizeof(int16_t) *
- frame->samples_per_channel_ *
- frame->num_channels_;
- msg->set_output_data(frame->data_, data_size);
- err = WriteMessageToDebugFile();
- if (err != kNoError) {
- return err;
- }
+ event_msg_->set_type(audioproc::Event::REVERSE_STREAM);
+ audioproc::ReverseStream* msg = event_msg_->mutable_reverse_stream();
+ const size_t channel_size = sizeof(float) * samples_per_channel;
+ for (int i = 0; i < num_channels; ++i)
+ msg->add_channel(data[i], channel_size);
+ RETURN_ON_ERR(WriteMessageToDebugFile());
}
#endif
- was_stream_delay_set_ = false;
- return kNoError;
+ render_audio_->CopyFrom(data, samples_per_channel, layout);
+ return AnalyzeReverseStreamLocked();
}
int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
CriticalSectionScoped crit_scoped(crit_);
- int err = kNoError;
-
if (frame == NULL) {
return kNullPointerError;
}
-
- if (frame->sample_rate_hz_ != sample_rate_hz_) {
+ // Must be a native rate.
+ if (frame->sample_rate_hz_ != kSampleRate8kHz &&
+ frame->sample_rate_hz_ != kSampleRate16kHz &&
+ frame->sample_rate_hz_ != kSampleRate32kHz) {
return kBadSampleRateError;
}
-
- if (frame->num_channels_ != num_reverse_channels_) {
- return kBadNumberChannelsError;
+ // This interface does not tolerate different forward and reverse rates.
+ if (frame->sample_rate_hz_ != fwd_in_format_.rate()) {
+ return kBadSampleRateError;
}
- if (frame->samples_per_channel_ != samples_per_channel_) {
+ RETURN_ON_ERR(MaybeInitializeLocked(fwd_in_format_.rate(),
+ fwd_out_format_.rate(),
+ frame->sample_rate_hz_,
+ fwd_in_format_.num_channels(),
+ fwd_in_format_.num_channels(),
+ frame->num_channels_));
+ if (frame->samples_per_channel_ != rev_in_format_.samples_per_channel()) {
return kBadDataLengthError;
}
@@ -431,44 +582,33 @@ int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
frame->samples_per_channel_ *
frame->num_channels_;
msg->set_data(frame->data_, data_size);
- err = WriteMessageToDebugFile();
- if (err != kNoError) {
- return err;
- }
+ RETURN_ON_ERR(WriteMessageToDebugFile());
}
#endif
render_audio_->DeinterleaveFrom(frame);
+ return AnalyzeReverseStreamLocked();
+}
- // TODO(ajm): turn the splitting filter into a component?
- if (sample_rate_hz_ == kSampleRate32kHz) {
- for (int i = 0; i < num_reverse_channels_; i++) {
+int AudioProcessingImpl::AnalyzeReverseStreamLocked() {
+ AudioBuffer* ra = render_audio_.get(); // For brevity.
+ if (rev_proc_format_.rate() == kSampleRate32kHz) {
+ for (int i = 0; i < rev_proc_format_.num_channels(); i++) {
// Split into low and high band.
- SplittingFilterAnalysis(render_audio_->data(i),
- render_audio_->low_pass_split_data(i),
- render_audio_->high_pass_split_data(i),
- render_audio_->analysis_filter_state1(i),
- render_audio_->analysis_filter_state2(i));
+ WebRtcSpl_AnalysisQMF(ra->data(i),
+ ra->samples_per_channel(),
+ ra->low_pass_split_data(i),
+ ra->high_pass_split_data(i),
+ ra->filter_states(i)->analysis_filter_state1,
+ ra->filter_states(i)->analysis_filter_state2);
}
}
- // TODO(ajm): warnings possible from components?
- err = echo_cancellation_->ProcessRenderAudio(render_audio_);
- if (err != kNoError) {
- return err;
- }
-
- err = echo_control_mobile_->ProcessRenderAudio(render_audio_);
- if (err != kNoError) {
- return err;
- }
+ RETURN_ON_ERR(echo_cancellation_->ProcessRenderAudio(ra));
+ RETURN_ON_ERR(echo_control_mobile_->ProcessRenderAudio(ra));
+ RETURN_ON_ERR(gain_control_->ProcessRenderAudio(ra));
- err = gain_control_->ProcessRenderAudio(render_audio_);
- if (err != kNoError) {
- return err;
- }
-
- return err; // TODO(ajm): this is for returning warnings; necessary?
+ return kNoError;
}
int AudioProcessingImpl::set_stream_delay_ms(int delay) {
@@ -499,6 +639,14 @@ bool AudioProcessingImpl::was_stream_delay_set() const {
return was_stream_delay_set_;
}
+void AudioProcessingImpl::set_stream_key_pressed(bool key_pressed) {
+ key_pressed_ = key_pressed;
+}
+
+bool AudioProcessingImpl::stream_key_pressed() const {
+ return key_pressed_;
+}
+
void AudioProcessingImpl::set_delay_offset_ms(int offset) {
CriticalSectionScoped crit_scoped(crit_);
delay_offset_ms_ = offset;
@@ -613,13 +761,6 @@ VoiceDetection* AudioProcessingImpl::voice_detection() const {
return voice_detection_;
}
-int32_t AudioProcessingImpl::ChangeUniqueId(const int32_t id) {
- CriticalSectionScoped crit_scoped(crit_);
- id_ = id;
-
- return kNoError;
-}
-
bool AudioProcessingImpl::is_data_processed() const {
int enabled_count = 0;
std::list<ProcessingComponent*>::const_iterator it;
@@ -645,20 +786,21 @@ bool AudioProcessingImpl::is_data_processed() const {
return true;
}
-bool AudioProcessingImpl::interleave_needed(bool is_data_processed) const {
+bool AudioProcessingImpl::output_copy_needed(bool is_data_processed) const {
// Check if we've upmixed or downmixed the audio.
- return (num_output_channels_ != num_input_channels_ || is_data_processed);
+ return ((fwd_proc_format_.num_channels() != fwd_in_format_.num_channels()) ||
+ is_data_processed);
}
bool AudioProcessingImpl::synthesis_needed(bool is_data_processed) const {
- return (is_data_processed && sample_rate_hz_ == kSampleRate32kHz);
+ return (is_data_processed && fwd_proc_format_.rate() == kSampleRate32kHz);
}
bool AudioProcessingImpl::analysis_needed(bool is_data_processed) const {
if (!is_data_processed && !voice_detection_->is_enabled()) {
// Only level_estimator_ is enabled.
return false;
- } else if (sample_rate_hz_ == kSampleRate32kHz) {
+ } else if (fwd_proc_format_.rate() == kSampleRate32kHz) {
// Something besides level_estimator_ is enabled, and we have super-wb.
return true;
}
@@ -690,17 +832,18 @@ int AudioProcessingImpl::WriteMessageToDebugFile() {
event_msg_->Clear();
- return 0;
+ return kNoError;
}
int AudioProcessingImpl::WriteInitMessage() {
event_msg_->set_type(audioproc::Event::INIT);
audioproc::Init* msg = event_msg_->mutable_init();
- msg->set_sample_rate(sample_rate_hz_);
- msg->set_device_sample_rate(echo_cancellation_->device_sample_rate_hz());
- msg->set_num_input_channels(num_input_channels_);
- msg->set_num_output_channels(num_output_channels_);
- msg->set_num_reverse_channels(num_reverse_channels_);
+ msg->set_sample_rate(fwd_in_format_.rate());
+ msg->set_num_input_channels(fwd_in_format_.num_channels());
+ msg->set_num_output_channels(fwd_proc_format_.num_channels());
+ msg->set_num_reverse_channels(rev_in_format_.num_channels());
+ msg->set_reverse_sample_rate(rev_in_format_.rate());
+ msg->set_output_sample_rate(fwd_out_format_.rate());
int err = WriteMessageToDebugFile();
if (err != kNoError) {
@@ -710,4 +853,5 @@ int AudioProcessingImpl::WriteInitMessage() {
return kNoError;
}
#endif // WEBRTC_AUDIOPROC_DEBUG_DUMP
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
index e48a2c18a4f..d34f305a96b 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl.h
@@ -19,9 +19,10 @@
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
+
class AudioBuffer;
class CriticalSectionWrapper;
-class EchoCancellationImplWrapper;
+class EchoCancellationImpl;
class EchoControlMobileImpl;
class FileWrapper;
class GainControlImpl;
@@ -39,44 +40,92 @@ class Event;
} // namespace audioproc
#endif
-class AudioProcessingImpl : public AudioProcessing {
+class AudioRate {
public:
- enum {
- kSampleRate8kHz = 8000,
- kSampleRate16kHz = 16000,
- kSampleRate32kHz = 32000
- };
+ explicit AudioRate(int sample_rate_hz)
+ : rate_(sample_rate_hz),
+ samples_per_channel_(AudioProcessing::kChunkSizeMs * rate_ / 1000) {}
+ virtual ~AudioRate() {}
+
+ void set(int rate) {
+ rate_ = rate;
+ samples_per_channel_ = AudioProcessing::kChunkSizeMs * rate_ / 1000;
+ }
- explicit AudioProcessingImpl(int id);
- virtual ~AudioProcessingImpl();
+ int rate() const { return rate_; }
+ int samples_per_channel() const { return samples_per_channel_; }
- CriticalSectionWrapper* crit() const;
+ private:
+ int rate_;
+ int samples_per_channel_;
+};
- int split_sample_rate_hz() const;
- bool was_stream_delay_set() const;
+class AudioFormat : public AudioRate {
+ public:
+ AudioFormat(int sample_rate_hz, int num_channels)
+ : AudioRate(sample_rate_hz),
+ num_channels_(num_channels) {}
+ virtual ~AudioFormat() {}
+
+ void set(int rate, int num_channels) {
+ AudioRate::set(rate);
+ num_channels_ = num_channels;
+ }
+
+ int num_channels() const { return num_channels_; }
+
+ private:
+ int num_channels_;
+};
+
+class AudioProcessingImpl : public AudioProcessing {
+ public:
+ explicit AudioProcessingImpl(const Config& config);
+ virtual ~AudioProcessingImpl();
// AudioProcessing methods.
virtual int Initialize() OVERRIDE;
- virtual int InitializeLocked();
+ virtual int Initialize(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ ChannelLayout input_layout,
+ ChannelLayout output_layout,
+ ChannelLayout reverse_layout) OVERRIDE;
virtual void SetExtraOptions(const Config& config) OVERRIDE;
virtual int EnableExperimentalNs(bool enable) OVERRIDE;
virtual bool experimental_ns_enabled() const OVERRIDE {
return false;
}
virtual int set_sample_rate_hz(int rate) OVERRIDE;
+ virtual int input_sample_rate_hz() const OVERRIDE;
virtual int sample_rate_hz() const OVERRIDE;
- virtual int set_num_channels(int input_channels,
- int output_channels) OVERRIDE;
+ virtual int proc_sample_rate_hz() const OVERRIDE;
+ virtual int proc_split_sample_rate_hz() const OVERRIDE;
virtual int num_input_channels() const OVERRIDE;
virtual int num_output_channels() const OVERRIDE;
- virtual int set_num_reverse_channels(int channels) OVERRIDE;
virtual int num_reverse_channels() const OVERRIDE;
+ virtual void set_output_will_be_muted(bool muted) OVERRIDE;
+ virtual bool output_will_be_muted() const OVERRIDE;
virtual int ProcessStream(AudioFrame* frame) OVERRIDE;
+ virtual int ProcessStream(const float* const* src,
+ int samples_per_channel,
+ int input_sample_rate_hz,
+ ChannelLayout input_layout,
+ int output_sample_rate_hz,
+ ChannelLayout output_layout,
+ float* const* dest) OVERRIDE;
virtual int AnalyzeReverseStream(AudioFrame* frame) OVERRIDE;
+ virtual int AnalyzeReverseStream(const float* const* data,
+ int samples_per_channel,
+ int sample_rate_hz,
+ ChannelLayout layout) OVERRIDE;
virtual int set_stream_delay_ms(int delay) OVERRIDE;
virtual int stream_delay_ms() const OVERRIDE;
+ virtual bool was_stream_delay_set() const OVERRIDE;
virtual void set_delay_offset_ms(int offset) OVERRIDE;
virtual int delay_offset_ms() const OVERRIDE;
+ virtual void set_stream_key_pressed(bool key_pressed) OVERRIDE;
+ virtual bool stream_key_pressed() const OVERRIDE;
virtual int StartDebugRecording(
const char filename[kMaxFilenameSize]) OVERRIDE;
virtual int StartDebugRecording(FILE* handle) OVERRIDE;
@@ -89,18 +138,32 @@ class AudioProcessingImpl : public AudioProcessing {
virtual NoiseSuppression* noise_suppression() const OVERRIDE;
virtual VoiceDetection* voice_detection() const OVERRIDE;
- // Module methods.
- virtual int32_t ChangeUniqueId(const int32_t id) OVERRIDE;
+ protected:
+ // Overridden in a mock.
+ virtual int InitializeLocked();
private:
+ int InitializeLocked(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ int num_input_channels,
+ int num_output_channels,
+ int num_reverse_channels);
+ int MaybeInitializeLocked(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ int num_input_channels,
+ int num_output_channels,
+ int num_reverse_channels);
+ int ProcessStreamLocked();
+ int AnalyzeReverseStreamLocked();
+
bool is_data_processed() const;
- bool interleave_needed(bool is_data_processed) const;
+ bool output_copy_needed(bool is_data_processed) const;
bool synthesis_needed(bool is_data_processed) const;
bool analysis_needed(bool is_data_processed) const;
- int id_;
-
- EchoCancellationImplWrapper* echo_cancellation_;
+ EchoCancellationImpl* echo_cancellation_;
EchoControlMobileImpl* echo_control_mobile_;
GainControlImpl* gain_control_;
HighPassFilterImpl* high_pass_filter_;
@@ -110,29 +173,34 @@ class AudioProcessingImpl : public AudioProcessing {
std::list<ProcessingComponent*> component_list_;
CriticalSectionWrapper* crit_;
- AudioBuffer* render_audio_;
- AudioBuffer* capture_audio_;
+ scoped_ptr<AudioBuffer> render_audio_;
+ scoped_ptr<AudioBuffer> capture_audio_;
#ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
// TODO(andrew): make this more graceful. Ideally we would split this stuff
// out into a separate class with an "enabled" and "disabled" implementation.
int WriteMessageToDebugFile();
int WriteInitMessage();
scoped_ptr<FileWrapper> debug_file_;
- scoped_ptr<audioproc::Event> event_msg_; // Protobuf message.
- std::string event_str_; // Memory for protobuf serialization.
+ scoped_ptr<audioproc::Event> event_msg_; // Protobuf message.
+ std::string event_str_; // Memory for protobuf serialization.
#endif
- int sample_rate_hz_;
- int split_sample_rate_hz_;
- int samples_per_channel_;
+ AudioFormat fwd_in_format_;
+ AudioFormat fwd_proc_format_;
+ AudioRate fwd_out_format_;
+ AudioFormat rev_in_format_;
+ AudioFormat rev_proc_format_;
+ int split_rate_;
+
int stream_delay_ms_;
int delay_offset_ms_;
bool was_stream_delay_set_;
- int num_reverse_channels_;
- int num_input_channels_;
- int num_output_channels_;
+ bool output_will_be_muted_;
+
+ bool key_pressed_;
};
+
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_AUDIO_PROCESSING_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc
new file mode 100644
index 00000000000..09576175756
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_impl_unittest.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/audio_processing_impl.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/config.h"
+#include "webrtc/modules/audio_processing/test/test_utils.h"
+#include "webrtc/modules/interface/module_common_types.h"
+
+using ::testing::Invoke;
+using ::testing::Return;
+
+namespace webrtc {
+
+class MockInitialize : public AudioProcessingImpl {
+ public:
+ explicit MockInitialize(const Config& config) : AudioProcessingImpl(config) {
+ }
+
+ MOCK_METHOD0(InitializeLocked, int());
+ int RealInitializeLocked() { return AudioProcessingImpl::InitializeLocked(); }
+};
+
+TEST(AudioProcessingImplTest, AudioParameterChangeTriggersInit) {
+ Config config;
+ MockInitialize mock(config);
+ ON_CALL(mock, InitializeLocked())
+ .WillByDefault(Invoke(&mock, &MockInitialize::RealInitializeLocked));
+
+ EXPECT_CALL(mock, InitializeLocked()).Times(1);
+ mock.Initialize();
+
+ AudioFrame frame;
+ // Call with the default parameters; there should be no init.
+ frame.num_channels_ = 1;
+ SetFrameSampleRate(&frame, 16000);
+ EXPECT_CALL(mock, InitializeLocked())
+ .Times(0);
+ EXPECT_NOERR(mock.ProcessStream(&frame));
+ EXPECT_NOERR(mock.AnalyzeReverseStream(&frame));
+
+ // New sample rate. (Only impacts ProcessStream).
+ SetFrameSampleRate(&frame, 32000);
+ EXPECT_CALL(mock, InitializeLocked())
+ .Times(1);
+ EXPECT_NOERR(mock.ProcessStream(&frame));
+
+ // New number of channels.
+ frame.num_channels_ = 2;
+ EXPECT_CALL(mock, InitializeLocked())
+ .Times(2);
+ EXPECT_NOERR(mock.ProcessStream(&frame));
+ // ProcessStream sets num_channels_ == num_output_channels.
+ frame.num_channels_ = 2;
+ EXPECT_NOERR(mock.AnalyzeReverseStream(&frame));
+
+ // A new sample rate passed to AnalyzeReverseStream should be an error and
+ // not cause an init.
+ SetFrameSampleRate(&frame, 16000);
+ EXPECT_CALL(mock, InitializeLocked())
+ .Times(0);
+ EXPECT_EQ(mock.kBadSampleRateError, mock.AnalyzeReverseStream(&frame));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_tests.gypi b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_tests.gypi
index 05d7514bded..82aa7fd14fd 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/audio_processing_tests.gypi
+++ b/chromium/third_party/webrtc/modules/audio_processing/audio_processing_tests.gypi
@@ -7,25 +7,23 @@
# be found in the AUTHORS file in the root of the source tree.
{
- 'targets': [
- {
- 'target_name': 'audioproc_unittest_proto',
- 'type': 'static_library',
- 'sources': [ 'test/unittest.proto', ],
- 'variables': {
- 'proto_in_dir': 'test',
- # Workaround to protect against gyp's pathname relativization when this
- # file is included by modules.gyp.
- 'proto_out_protected': 'webrtc/audio_processing',
- 'proto_out_dir': '<(proto_out_protected)',
- },
- 'includes': [ '../../build/protoc.gypi', ],
- },
- ],
'conditions': [
['enable_protobuf==1', {
'targets': [
{
+ 'target_name': 'audioproc_unittest_proto',
+ 'type': 'static_library',
+ 'sources': [ 'test/unittest.proto', ],
+ 'variables': {
+ 'proto_in_dir': 'test',
+ # Workaround to protect against gyp's pathname relativization when
+ # this file is included by modules.gyp.
+ 'proto_out_protected': 'webrtc/audio_processing',
+ 'proto_out_dir': '<(proto_out_protected)',
+ },
+ 'includes': [ '../../build/protoc.gypi', ],
+ },
+ {
'target_name': 'audioproc',
'type': 'executable',
'dependencies': [
diff --git a/chromium/third_party/webrtc/modules/audio_processing/common.h b/chromium/third_party/webrtc/modules/audio_processing/common.h
new file mode 100644
index 00000000000..42454df299f
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/common.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_COMMON_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_COMMON_H_
+
+#include <assert.h>
+#include <string.h>
+
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+static inline int ChannelsFromLayout(AudioProcessing::ChannelLayout layout) {
+ switch (layout) {
+ case AudioProcessing::kMono:
+ case AudioProcessing::kMonoAndKeyboard:
+ return 1;
+ case AudioProcessing::kStereo:
+ case AudioProcessing::kStereoAndKeyboard:
+ return 2;
+ }
+ assert(false);
+ return -1;
+}
+
+// Helper to encapsulate a contiguous data buffer with access to a pointer
+// array of the deinterleaved channels.
+template <typename T>
+class ChannelBuffer {
+ public:
+ ChannelBuffer(int samples_per_channel, int num_channels)
+ : data_(new T[samples_per_channel * num_channels]),
+ channels_(new T*[num_channels]),
+ samples_per_channel_(samples_per_channel),
+ num_channels_(num_channels) {
+ memset(data_.get(), 0, sizeof(T) * samples_per_channel * num_channels);
+ for (int i = 0; i < num_channels; ++i)
+ channels_[i] = &data_[i * samples_per_channel];
+ }
+ ~ChannelBuffer() {}
+
+ void CopyFrom(const void* channel_ptr, int i) {
+ assert(i < num_channels_);
+ memcpy(channels_[i], channel_ptr, samples_per_channel_ * sizeof(T));
+ }
+
+ T* data() { return data_.get(); }
+ T* channel(int i) {
+ assert(i < num_channels_);
+ return channels_[i];
+ }
+ T** channels() { return channels_.get(); }
+
+ int samples_per_channel() { return samples_per_channel_; }
+ int num_channels() { return num_channels_; }
+ int length() { return samples_per_channel_ * num_channels_; }
+
+ private:
+ scoped_ptr<T[]> data_;
+ scoped_ptr<T*[]> channels_;
+ int samples_per_channel_;
+ int num_channels_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_COMMON_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/debug.proto b/chromium/third_party/webrtc/modules/audio_processing/debug.proto
index 4b3a1638941..dce2f792093 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/debug.proto
+++ b/chromium/third_party/webrtc/modules/audio_processing/debug.proto
@@ -4,22 +4,39 @@ package webrtc.audioproc;
message Init {
optional int32 sample_rate = 1;
- optional int32 device_sample_rate = 2;
+ optional int32 device_sample_rate = 2 [deprecated=true];
optional int32 num_input_channels = 3;
optional int32 num_output_channels = 4;
optional int32 num_reverse_channels = 5;
+ optional int32 reverse_sample_rate = 6;
+ optional int32 output_sample_rate = 7;
}
+// May contain interleaved or deinterleaved data, but don't store both formats.
message ReverseStream {
+ // int16 interleaved data.
optional bytes data = 1;
+
+ // float deinterleaved data, where each repeated element points to a single
+ // channel buffer of data.
+ repeated bytes channel = 2;
}
+// May contain interleaved or deinterleaved data, but don't store both formats.
message Stream {
+ // int16 interleaved data.
optional bytes input_data = 1;
optional bytes output_data = 2;
+
optional int32 delay = 3;
optional sint32 drift = 4;
optional int32 level = 5;
+ optional bool keypress = 6;
+
+ // float deinterleaved data, where each repeated element points to a single
+ // channel buffer of data.
+ repeated bytes input_channel = 7;
+ repeated bytes output_channel = 8;
}
message Event {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.cc
index cd12363ec5e..e770f9fe377 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.cc
@@ -18,7 +18,6 @@ extern "C" {
}
#include "webrtc/modules/audio_processing/aec/include/echo_cancellation.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
namespace webrtc {
@@ -56,23 +55,20 @@ AudioProcessing::Error MapError(int err) {
}
} // namespace
-EchoCancellationImplWrapper* EchoCancellationImplWrapper::Create(
- const AudioProcessingImpl* audioproc) {
- return new EchoCancellationImpl(audioproc);
-}
-
-EchoCancellationImpl::EchoCancellationImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
+EchoCancellationImpl::EchoCancellationImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
apm_(apm),
+ crit_(crit),
drift_compensation_enabled_(false),
metrics_enabled_(false),
suppression_level_(kModerateSuppression),
- device_sample_rate_hz_(48000),
stream_drift_samples_(0),
was_stream_drift_set_(false),
stream_has_echo_(false),
delay_logging_enabled_(false),
- delay_correction_enabled_(false) {}
+ delay_correction_enabled_(false),
+ reported_delay_enabled_(true) {}
EchoCancellationImpl::~EchoCancellationImpl() {}
@@ -133,10 +129,10 @@ int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
Handle* my_handle = handle(handle_index);
err = WebRtcAec_Process(
my_handle,
- audio->low_pass_split_data(i),
- audio->high_pass_split_data(i),
- audio->low_pass_split_data(i),
- audio->high_pass_split_data(i),
+ audio->low_pass_split_data_f(i),
+ audio->high_pass_split_data_f(i),
+ audio->low_pass_split_data_f(i),
+ audio->high_pass_split_data_f(i),
static_cast<int16_t>(audio->samples_per_split_channel()),
apm_->stream_delay_ms(),
stream_drift_samples_);
@@ -168,7 +164,7 @@ int EchoCancellationImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
int EchoCancellationImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
// Ensure AEC and AECM are not both enabled.
if (enable && apm_->echo_control_mobile()->is_enabled()) {
return apm_->kBadParameterError;
@@ -182,7 +178,7 @@ bool EchoCancellationImpl::is_enabled() const {
}
int EchoCancellationImpl::set_suppression_level(SuppressionLevel level) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (MapSetting(level) == -1) {
return apm_->kBadParameterError;
}
@@ -197,7 +193,7 @@ EchoCancellation::SuppressionLevel EchoCancellationImpl::suppression_level()
}
int EchoCancellationImpl::enable_drift_compensation(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
drift_compensation_enabled_ = enable;
return Configure();
}
@@ -206,20 +202,6 @@ bool EchoCancellationImpl::is_drift_compensation_enabled() const {
return drift_compensation_enabled_;
}
-int EchoCancellationImpl::set_device_sample_rate_hz(int rate) {
- CriticalSectionScoped crit_scoped(apm_->crit());
- if (rate < 8000 || rate > 96000) {
- return apm_->kBadParameterError;
- }
-
- device_sample_rate_hz_ = rate;
- return Initialize();
-}
-
-int EchoCancellationImpl::device_sample_rate_hz() const {
- return device_sample_rate_hz_;
-}
-
void EchoCancellationImpl::set_stream_drift_samples(int drift) {
was_stream_drift_set_ = true;
stream_drift_samples_ = drift;
@@ -230,7 +212,7 @@ int EchoCancellationImpl::stream_drift_samples() const {
}
int EchoCancellationImpl::enable_metrics(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
metrics_enabled_ = enable;
return Configure();
}
@@ -242,7 +224,7 @@ bool EchoCancellationImpl::are_metrics_enabled() const {
// TODO(ajm): we currently just use the metrics from the first AEC. Think more
// aboue the best way to extend this to multi-channel.
int EchoCancellationImpl::GetMetrics(Metrics* metrics) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (metrics == NULL) {
return apm_->kNullPointerError;
}
@@ -289,7 +271,7 @@ bool EchoCancellationImpl::stream_has_echo() const {
}
int EchoCancellationImpl::enable_delay_logging(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
delay_logging_enabled_ = enable;
return Configure();
}
@@ -300,7 +282,7 @@ bool EchoCancellationImpl::is_delay_logging_enabled() const {
// TODO(bjornv): How should we handle the multi-channel case?
int EchoCancellationImpl::GetDelayMetrics(int* median, int* std) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (median == NULL) {
return apm_->kNullPointerError;
}
@@ -322,7 +304,7 @@ int EchoCancellationImpl::GetDelayMetrics(int* median, int* std) {
}
struct AecCore* EchoCancellationImpl::aec_core() const {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (!is_component_enabled()) {
return NULL;
}
@@ -336,13 +318,12 @@ int EchoCancellationImpl::Initialize() {
return err;
}
- was_stream_drift_set_ = false;
-
return apm_->kNoError;
}
void EchoCancellationImpl::SetExtraOptions(const Config& config) {
delay_correction_enabled_ = config.Get<DelayCorrection>().enabled;
+ reported_delay_enabled_ = config.Get<ReportedDelay>().enabled;
Configure();
}
@@ -357,16 +338,19 @@ void* EchoCancellationImpl::CreateHandle() const {
return handle;
}
-int EchoCancellationImpl::DestroyHandle(void* handle) const {
+void EchoCancellationImpl::DestroyHandle(void* handle) const {
assert(handle != NULL);
- return WebRtcAec_Free(static_cast<Handle*>(handle));
+ WebRtcAec_Free(static_cast<Handle*>(handle));
}
int EchoCancellationImpl::InitializeHandle(void* handle) const {
assert(handle != NULL);
+ // TODO(ajm): Drift compensation is disabled in practice. If restored, it
+ // should be managed internally and not depend on the hardware sample rate.
+ // For now, just hardcode a 48 kHz value.
return WebRtcAec_Init(static_cast<Handle*>(handle),
- apm_->sample_rate_hz(),
- device_sample_rate_hz_);
+ apm_->proc_sample_rate_hz(),
+ 48000);
}
int EchoCancellationImpl::ConfigureHandle(void* handle) const {
@@ -379,6 +363,8 @@ int EchoCancellationImpl::ConfigureHandle(void* handle) const {
WebRtcAec_enable_delay_correction(WebRtcAec_aec_core(
static_cast<Handle*>(handle)), delay_correction_enabled_ ? 1 : 0);
+ WebRtcAec_enable_reported_delay(WebRtcAec_aec_core(
+ static_cast<Handle*>(handle)), reported_delay_enabled_ ? 1 : 0);
return WebRtcAec_set_config(static_cast<Handle*>(handle), config);
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.h b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.h
index 3ab0ce26689..b9c116a0650 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl.h
@@ -11,25 +11,26 @@
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
-#include "webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h"
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
class AudioBuffer;
+class CriticalSectionWrapper;
-class EchoCancellationImpl : public EchoCancellationImplWrapper {
+class EchoCancellationImpl : public EchoCancellation,
+ public ProcessingComponent {
public:
- explicit EchoCancellationImpl(const AudioProcessingImpl* apm);
+ EchoCancellationImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit);
virtual ~EchoCancellationImpl();
- // EchoCancellationImplWrapper implementation.
- virtual int ProcessRenderAudio(const AudioBuffer* audio) OVERRIDE;
- virtual int ProcessCaptureAudio(AudioBuffer* audio) OVERRIDE;
+ int ProcessRenderAudio(const AudioBuffer* audio);
+ int ProcessCaptureAudio(AudioBuffer* audio);
// EchoCancellation implementation.
virtual bool is_enabled() const OVERRIDE;
- virtual int device_sample_rate_hz() const OVERRIDE;
virtual int stream_drift_samples() const OVERRIDE;
// ProcessingComponent implementation.
@@ -41,7 +42,6 @@ class EchoCancellationImpl : public EchoCancellationImplWrapper {
virtual int Enable(bool enable) OVERRIDE;
virtual int enable_drift_compensation(bool enable) OVERRIDE;
virtual bool is_drift_compensation_enabled() const OVERRIDE;
- virtual int set_device_sample_rate_hz(int rate) OVERRIDE;
virtual void set_stream_drift_samples(int drift) OVERRIDE;
virtual int set_suppression_level(SuppressionLevel level) OVERRIDE;
virtual SuppressionLevel suppression_level() const OVERRIDE;
@@ -58,20 +58,21 @@ class EchoCancellationImpl : public EchoCancellationImplWrapper {
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
bool drift_compensation_enabled_;
bool metrics_enabled_;
SuppressionLevel suppression_level_;
- int device_sample_rate_hz_;
int stream_drift_samples_;
bool was_stream_drift_set_;
bool stream_has_echo_;
bool delay_logging_enabled_;
bool delay_correction_enabled_;
+ bool reported_delay_enabled_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc
index f9bc3213ff1..49bcf9459b0 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_unittest.cc
@@ -14,6 +14,7 @@ extern "C" {
}
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
@@ -47,4 +48,34 @@ TEST(EchoCancellationInternalTest, DelayCorrection) {
EXPECT_EQ(0, WebRtcAec_delay_correction_enabled(aec_core));
}
+TEST(EchoCancellationInternalTest, ReportedDelay) {
+ scoped_ptr<AudioProcessing> ap(AudioProcessing::Create(0));
+ EXPECT_TRUE(ap->echo_cancellation()->aec_core() == NULL);
+
+ EXPECT_EQ(ap->kNoError, ap->echo_cancellation()->Enable(true));
+ EXPECT_TRUE(ap->echo_cancellation()->is_enabled());
+
+ AecCore* aec_core = ap->echo_cancellation()->aec_core();
+ ASSERT_TRUE(aec_core != NULL);
+ // Enabled by default.
+ EXPECT_EQ(1, WebRtcAec_reported_delay_enabled(aec_core));
+
+ Config config;
+ config.Set<ReportedDelay>(new ReportedDelay(false));
+ ap->SetExtraOptions(config);
+ EXPECT_EQ(0, WebRtcAec_reported_delay_enabled(aec_core));
+
+ // Retains setting after initialization.
+ EXPECT_EQ(ap->kNoError, ap->Initialize());
+ EXPECT_EQ(0, WebRtcAec_reported_delay_enabled(aec_core));
+
+ config.Set<ReportedDelay>(new ReportedDelay(true));
+ ap->SetExtraOptions(config);
+ EXPECT_EQ(1, WebRtcAec_reported_delay_enabled(aec_core));
+
+ // Retains setting after initialization.
+ EXPECT_EQ(ap->kNoError, ap->Initialize());
+ EXPECT_EQ(1, WebRtcAec_reported_delay_enabled(aec_core));
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h b/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h
deleted file mode 100644
index f1c03f32d6f..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_cancellation_impl_wrapper.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_WRAPPER_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_WRAPPER_H_
-
-#include "webrtc/modules/audio_processing/include/audio_processing.h"
-#include "webrtc/modules/audio_processing/processing_component.h"
-
-namespace webrtc {
-
-class AudioProcessingImpl;
-class AudioBuffer;
-
-class EchoCancellationImplWrapper : public virtual EchoCancellation,
- public virtual ProcessingComponent {
- public:
- static EchoCancellationImplWrapper* Create(
- const AudioProcessingImpl* audioproc);
- virtual ~EchoCancellationImplWrapper() {}
-
- virtual int ProcessRenderAudio(const AudioBuffer* audio) = 0;
- virtual int ProcessCaptureAudio(AudioBuffer* audio) = 0;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_WRAPPER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
index f7853814966..a03adc5300e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.cc
@@ -15,7 +15,6 @@
#include "webrtc/modules/audio_processing/aecm/include/echo_control_mobile.h"
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
@@ -63,9 +62,11 @@ size_t EchoControlMobile::echo_path_size_bytes() {
return WebRtcAecm_echo_path_size_bytes();
}
-EchoControlMobileImpl::EchoControlMobileImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
+EchoControlMobileImpl::EchoControlMobileImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
apm_(apm),
+ crit_(crit),
routing_mode_(kSpeakerphone),
comfort_noise_enabled_(true),
external_echo_path_(NULL) {}
@@ -127,7 +128,7 @@ int EchoControlMobileImpl::ProcessCaptureAudio(AudioBuffer* audio) {
for (int i = 0; i < audio->num_channels(); i++) {
// TODO(ajm): improve how this works, possibly inside AECM.
// This is kind of hacked up.
- int16_t* noisy = audio->low_pass_reference(i);
+ const int16_t* noisy = audio->low_pass_reference(i);
int16_t* clean = audio->low_pass_split_data(i);
if (noisy == NULL) {
noisy = clean;
@@ -155,7 +156,7 @@ int EchoControlMobileImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
int EchoControlMobileImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
// Ensure AEC and AECM are not both enabled.
if (enable && apm_->echo_cancellation()->is_enabled()) {
return apm_->kBadParameterError;
@@ -169,7 +170,7 @@ bool EchoControlMobileImpl::is_enabled() const {
}
int EchoControlMobileImpl::set_routing_mode(RoutingMode mode) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (MapSetting(mode) == -1) {
return apm_->kBadParameterError;
}
@@ -184,7 +185,7 @@ EchoControlMobile::RoutingMode EchoControlMobileImpl::routing_mode()
}
int EchoControlMobileImpl::enable_comfort_noise(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
comfort_noise_enabled_ = enable;
return Configure();
}
@@ -195,7 +196,7 @@ bool EchoControlMobileImpl::is_comfort_noise_enabled() const {
int EchoControlMobileImpl::SetEchoPath(const void* echo_path,
size_t size_bytes) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (echo_path == NULL) {
return apm_->kNullPointerError;
}
@@ -214,7 +215,7 @@ int EchoControlMobileImpl::SetEchoPath(const void* echo_path,
int EchoControlMobileImpl::GetEchoPath(void* echo_path,
size_t size_bytes) const {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (echo_path == NULL) {
return apm_->kNullPointerError;
}
@@ -240,7 +241,7 @@ int EchoControlMobileImpl::Initialize() {
return apm_->kNoError;
}
- if (apm_->sample_rate_hz() == apm_->kSampleRate32kHz) {
+ if (apm_->proc_sample_rate_hz() > apm_->kSampleRate16kHz) {
LOG(LS_ERROR) << "AECM only supports 16 kHz or lower sample rates";
return apm_->kBadSampleRateError;
}
@@ -259,14 +260,14 @@ void* EchoControlMobileImpl::CreateHandle() const {
return handle;
}
-int EchoControlMobileImpl::DestroyHandle(void* handle) const {
- return WebRtcAecm_Free(static_cast<Handle*>(handle));
+void EchoControlMobileImpl::DestroyHandle(void* handle) const {
+ WebRtcAecm_Free(static_cast<Handle*>(handle));
}
int EchoControlMobileImpl::InitializeHandle(void* handle) const {
assert(handle != NULL);
Handle* my_handle = static_cast<Handle*>(handle);
- if (WebRtcAecm_Init(my_handle, apm_->sample_rate_hz()) != 0) {
+ if (WebRtcAecm_Init(my_handle, apm_->proc_sample_rate_hz()) != 0) {
return GetHandleError(my_handle);
}
if (external_echo_path_ != NULL) {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.h b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.h
index 5eefab0a3c2..4f5b5931a1c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/echo_control_mobile_impl.h
@@ -15,13 +15,15 @@
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class EchoControlMobileImpl : public EchoControlMobile,
public ProcessingComponent {
public:
- explicit EchoControlMobileImpl(const AudioProcessingImpl* apm);
+ EchoControlMobileImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit);
virtual ~EchoControlMobileImpl();
int ProcessRenderAudio(const AudioBuffer* audio);
@@ -47,11 +49,12 @@ class EchoControlMobileImpl : public EchoControlMobile,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
RoutingMode routing_mode_;
bool comfort_noise_enabled_;
unsigned char* external_echo_path_;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
index 35547031e30..a67b67ecb16 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.cc
@@ -12,12 +12,10 @@
#include <assert.h>
+#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/modules/audio_processing/agc/include/gain_control.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
-
namespace webrtc {
typedef void Handle;
@@ -37,9 +35,11 @@ int16_t MapSetting(GainControl::Mode mode) {
}
} // namespace
-GainControlImpl::GainControlImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
+GainControlImpl::GainControlImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
apm_(apm),
+ crit_(crit),
mode_(kAdaptiveAnalog),
minimum_capture_level_(0),
maximum_capture_level_(255),
@@ -59,7 +59,7 @@ int GainControlImpl::ProcessRenderAudio(AudioBuffer* audio) {
assert(audio->samples_per_split_channel() <= 160);
- int16_t* mixed_data = audio->low_pass_split_data(0);
+ const int16_t* mixed_data = audio->low_pass_split_data(0);
if (audio->num_channels() > 1) {
audio->CopyAndMixLowPass(1);
mixed_data = audio->mixed_low_pass_data(0);
@@ -91,6 +91,7 @@ int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
int err = apm_->kNoError;
if (mode_ == kAdaptiveAnalog) {
+ capture_levels_.assign(num_handles(), analog_capture_level_);
for (int i = 0; i < num_handles(); i++) {
Handle* my_handle = static_cast<Handle*>(handle(i));
err = WebRtcAgc_AddMic(
@@ -114,7 +115,6 @@ int GainControlImpl::AnalyzeCaptureAudio(AudioBuffer* audio) {
audio->low_pass_split_data(i),
audio->high_pass_split_data(i),
static_cast<int16_t>(audio->samples_per_split_channel()),
- //capture_levels_[i],
analog_capture_level_,
&capture_level_out);
@@ -190,13 +190,6 @@ int GainControlImpl::set_stream_analog_level(int level) {
if (level < minimum_capture_level_ || level > maximum_capture_level_) {
return apm_->kBadParameterError;
}
-
- if (mode_ == kAdaptiveAnalog) {
- if (level != analog_capture_level_) {
- // The analog level has been changed; update our internal levels.
- capture_levels_.assign(num_handles(), level);
- }
- }
analog_capture_level_ = level;
return apm_->kNoError;
@@ -210,7 +203,7 @@ int GainControlImpl::stream_analog_level() {
}
int GainControlImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
return EnableComponent(enable);
}
@@ -219,7 +212,7 @@ bool GainControlImpl::is_enabled() const {
}
int GainControlImpl::set_mode(Mode mode) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (MapSetting(mode) == -1) {
return apm_->kBadParameterError;
}
@@ -234,7 +227,7 @@ GainControl::Mode GainControlImpl::mode() const {
int GainControlImpl::set_analog_level_limits(int minimum,
int maximum) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (minimum < 0) {
return apm_->kBadParameterError;
}
@@ -266,7 +259,7 @@ bool GainControlImpl::stream_is_saturated() const {
}
int GainControlImpl::set_target_level_dbfs(int level) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (level > 31 || level < 0) {
return apm_->kBadParameterError;
}
@@ -280,7 +273,7 @@ int GainControlImpl::target_level_dbfs() const {
}
int GainControlImpl::set_compression_gain_db(int gain) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (gain < 0 || gain > 90) {
return apm_->kBadParameterError;
}
@@ -294,7 +287,7 @@ int GainControlImpl::compression_gain_db() const {
}
int GainControlImpl::enable_limiter(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
limiter_enabled_ = enable;
return Configure();
}
@@ -309,11 +302,7 @@ int GainControlImpl::Initialize() {
return err;
}
- analog_capture_level_ =
- (maximum_capture_level_ - minimum_capture_level_) >> 1;
capture_levels_.assign(num_handles(), analog_capture_level_);
- was_analog_level_set_ = false;
-
return apm_->kNoError;
}
@@ -328,8 +317,8 @@ void* GainControlImpl::CreateHandle() const {
return handle;
}
-int GainControlImpl::DestroyHandle(void* handle) const {
- return WebRtcAgc_Free(static_cast<Handle*>(handle));
+void GainControlImpl::DestroyHandle(void* handle) const {
+ WebRtcAgc_Free(static_cast<Handle*>(handle));
}
int GainControlImpl::InitializeHandle(void* handle) const {
@@ -337,7 +326,7 @@ int GainControlImpl::InitializeHandle(void* handle) const {
minimum_capture_level_,
maximum_capture_level_,
MapSetting(mode_),
- apm_->sample_rate_hz());
+ apm_->proc_sample_rate_hz());
}
int GainControlImpl::ConfigureHandle(void* handle) const {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
index 2de02f6e6b7..81159870009 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/gain_control_impl.h
@@ -17,13 +17,15 @@
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class GainControlImpl : public GainControl,
public ProcessingComponent {
public:
- explicit GainControlImpl(const AudioProcessingImpl* apm);
+ GainControlImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit);
virtual ~GainControlImpl();
int ProcessRenderAudio(AudioBuffer* audio);
@@ -58,11 +60,12 @@ class GainControlImpl : public GainControl,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
Mode mode_;
int minimum_capture_level_;
int maximum_capture_level_;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp b/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp
new file mode 100644
index 00000000000..55c79689f7e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets.gyp
@@ -0,0 +1,45 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'includes': ['lib_core_neon_offsets.gypi'],
+ 'targets' : [
+ {
+ 'target_name': 'gen_nsx_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'sources': ['<(shared_generated_dir)/nsx_core_neon_offsets.o',],
+ 'variables' : {
+ 'unpack_lib_name':'nsx_core_neon_offsets.o',
+ },
+ 'includes': [
+ '../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ {
+ 'target_name': 'gen_aecm_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'variables': {
+ 'unpack_lib_name':'aecm_core_neon_offsets.o',
+ },
+ 'sources': ['<(shared_generated_dir)/aecm_core_neon_offsets.o',],
+ 'includes': [
+ '../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ ],
+}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp b/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp
new file mode 100644
index 00000000000..f4a9134fb21
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/gen_core_neon_offsets_chromium.gyp
@@ -0,0 +1,45 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+ 'includes': ['lib_core_neon_offsets.gypi'],
+ 'targets' : [
+ {
+ 'target_name': 'gen_nsx_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'sources': ['<(shared_generated_dir)/nsx_core_neon_offsets.o',],
+ 'variables' : {
+ 'unpack_lib_name':'nsx_core_neon_offsets.o',
+ },
+ 'includes': [
+ '../../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ {
+ 'target_name': 'gen_aecm_core_neon_offsets_h',
+ 'type': 'none',
+ 'dependencies': [
+ 'lib_core_neon_offsets',
+ '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx_obj_int_extract#host',
+ ],
+ 'variables': {
+ 'unpack_lib_name':'aecm_core_neon_offsets.o',
+ },
+ 'sources': ['<(shared_generated_dir)/aecm_core_neon_offsets.o',],
+ 'includes': [
+ '../../../../third_party/libvpx/unpack_lib_posix.gypi',
+ '../../../../third_party/libvpx/obj_int_extract.gypi',
+ ],
+ },
+ ],
+}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.cc
index da2170373b0..0a23ff23555 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.cc
@@ -13,11 +13,10 @@
#include <assert.h>
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
-#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
namespace webrtc {
namespace {
@@ -36,7 +35,7 @@ struct FilterState {
int InitializeFilter(FilterState* hpf, int sample_rate_hz) {
assert(hpf != NULL);
- if (sample_rate_hz == AudioProcessingImpl::kSampleRate8kHz) {
+ if (sample_rate_hz == AudioProcessing::kSampleRate8kHz) {
hpf->ba = kFilterCoefficients8kHz;
} else {
hpf->ba = kFilterCoefficients;
@@ -105,9 +104,11 @@ int Filter(FilterState* hpf, int16_t* data, int length) {
typedef FilterState Handle;
-HighPassFilterImpl::HighPassFilterImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
- apm_(apm) {}
+HighPassFilterImpl::HighPassFilterImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
+ apm_(apm),
+ crit_(crit) {}
HighPassFilterImpl::~HighPassFilterImpl() {}
@@ -135,7 +136,7 @@ int HighPassFilterImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
int HighPassFilterImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
return EnableComponent(enable);
}
@@ -147,14 +148,13 @@ void* HighPassFilterImpl::CreateHandle() const {
return new FilterState;
}
-int HighPassFilterImpl::DestroyHandle(void* handle) const {
+void HighPassFilterImpl::DestroyHandle(void* handle) const {
delete static_cast<Handle*>(handle);
- return apm_->kNoError;
}
int HighPassFilterImpl::InitializeHandle(void* handle) const {
return InitializeFilter(static_cast<Handle*>(handle),
- apm_->sample_rate_hz());
+ apm_->proc_sample_rate_hz());
}
int HighPassFilterImpl::ConfigureHandle(void* /*handle*/) const {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.h b/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.h
index 7e11ea9ceee..6f91f3bc049 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/high_pass_filter_impl.h
@@ -15,13 +15,14 @@
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class HighPassFilterImpl : public HighPassFilter,
public ProcessingComponent {
public:
- explicit HighPassFilterImpl(const AudioProcessingImpl* apm);
+ HighPassFilterImpl(const AudioProcessing* apm, CriticalSectionWrapper* crit);
virtual ~HighPassFilterImpl();
int ProcessCaptureAudio(AudioBuffer* audio);
@@ -37,11 +38,12 @@ class HighPassFilterImpl : public HighPassFilter,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h b/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
index b5c856de273..77c3f3add22 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/include/audio_processing.h
@@ -15,7 +15,6 @@
#include <stdio.h> // FILE
#include "webrtc/common.h"
-#include "webrtc/modules/interface/module.h"
#include "webrtc/typedefs.h"
struct AecCore;
@@ -50,11 +49,32 @@ class VoiceDetection;
// except when really necessary.
struct DelayCorrection {
DelayCorrection() : enabled(false) {}
- DelayCorrection(bool enabled) : enabled(enabled) {}
+ explicit DelayCorrection(bool enabled) : enabled(enabled) {}
+ bool enabled;
+};
+
+// Use to disable the reported system delays. By disabling the reported system
+// delays the echo cancellation algorithm assumes the process and reverse
+// streams to be aligned. This configuration only applies to EchoCancellation
+// and not EchoControlMobile and is set with AudioProcessing::SetExtraOptions().
+// Note that by disabling reported system delays the EchoCancellation may
+// regress in performance.
+struct ReportedDelay {
+ ReportedDelay() : enabled(true) {}
+ explicit ReportedDelay(bool enabled) : enabled(enabled) {}
+ bool enabled;
+};
+// Must be provided through AudioProcessing::Create(Confg&). It will have no
+// impact if used with AudioProcessing::SetExtraOptions().
+struct ExperimentalAgc {
+ ExperimentalAgc() : enabled(true) {}
+ explicit ExperimentalAgc(bool enabled) : enabled(enabled) {}
bool enabled;
};
+static const int kAudioProcMaxNativeSampleRateHz = 32000;
+
// The Audio Processing Module (APM) provides a collection of voice processing
// components designed for real-time communications software.
//
@@ -84,16 +104,12 @@ struct DelayCorrection {
// 2. Parameter getters are never called concurrently with the corresponding
// setter.
//
-// APM accepts only 16-bit linear PCM audio data in frames of 10 ms. Multiple
-// channels should be interleaved.
+// APM accepts only linear PCM audio data in chunks of 10 ms. The int16
+// interfaces use interleaved data, while the float interfaces use deinterleaved
+// data.
//
// Usage example, omitting error checking:
// AudioProcessing* apm = AudioProcessing::Create(0);
-// apm->set_sample_rate_hz(32000); // Super-wideband processing.
-//
-// // Mono capture and stereo render.
-// apm->set_num_channels(1, 1);
-// apm->set_num_reverse_channels(2);
//
// apm->high_pass_filter()->Enable(true);
//
@@ -132,13 +148,27 @@ struct DelayCorrection {
// // Close the application...
// delete apm;
//
-class AudioProcessing : public Module {
+class AudioProcessing {
public:
- // Creates a APM instance, with identifier |id|. Use one instance for every
- // primary audio stream requiring processing. On the client-side, this would
- // typically be one instance for the near-end stream, and additional instances
- // for each far-end stream which requires processing. On the server-side,
- // this would typically be one instance for every incoming stream.
+ enum ChannelLayout {
+ kMono,
+ // Left, right.
+ kStereo,
+ // Mono, keyboard mic.
+ kMonoAndKeyboard,
+ // Left, right, keyboard mic.
+ kStereoAndKeyboard
+ };
+
+ // Creates an APM instance. Use one instance for every primary audio stream
+ // requiring processing. On the client-side, this would typically be one
+ // instance for the near-end stream, and additional instances for each far-end
+ // stream which requires processing. On the server-side, this would typically
+ // be one instance for every incoming stream.
+ static AudioProcessing* Create();
+ // Allows passing in an optional configuration at create-time.
+ static AudioProcessing* Create(const Config& config);
+ // TODO(ajm): Deprecated; remove all calls to it.
static AudioProcessing* Create(int id);
virtual ~AudioProcessing() {}
@@ -147,11 +177,26 @@ class AudioProcessing : public Module {
// it is not necessary to call before processing the first stream after
// creation.
//
- // set_sample_rate_hz(), set_num_channels() and set_num_reverse_channels()
- // will trigger a full initialization if the settings are changed from their
- // existing values. Otherwise they are no-ops.
+ // It is also not necessary to call if the audio parameters (sample
+ // rate and number of channels) have changed. Passing updated parameters
+ // directly to |ProcessStream()| and |AnalyzeReverseStream()| is permissible.
+ // If the parameters are known at init-time though, they may be provided.
virtual int Initialize() = 0;
+ // The int16 interfaces require:
+ // - only |NativeRate|s be used
+ // - that the input, output and reverse rates must match
+ // - that |output_layout| matches |input_layout|
+ //
+ // The float interfaces accept arbitrary rates and support differing input
+ // and output layouts, but the output may only remove channels, not add.
+ virtual int Initialize(int input_sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ ChannelLayout input_layout,
+ ChannelLayout output_layout,
+ ChannelLayout reverse_layout) = 0;
+
// Pass down additional options which don't have explicit setters. This
// ensures the options are applied immediately.
virtual void SetExtraOptions(const Config& config) = 0;
@@ -159,23 +204,30 @@ class AudioProcessing : public Module {
virtual int EnableExperimentalNs(bool enable) = 0;
virtual bool experimental_ns_enabled() const = 0;
- // Sets the sample |rate| in Hz for both the primary and reverse audio
- // streams. 8000, 16000 or 32000 Hz are permitted.
+ // DEPRECATED.
+ // TODO(ajm): Remove after Chromium has upgraded to using Initialize().
virtual int set_sample_rate_hz(int rate) = 0;
+ // TODO(ajm): Remove after voice engine no longer requires it to resample
+ // the reverse stream to the forward rate.
+ virtual int input_sample_rate_hz() const = 0;
+ // TODO(ajm): Remove after Chromium no longer depends on it.
virtual int sample_rate_hz() const = 0;
- // Sets the number of channels for the primary audio stream. Input frames must
- // contain a number of channels given by |input_channels|, while output frames
- // will be returned with number of channels given by |output_channels|.
- virtual int set_num_channels(int input_channels, int output_channels) = 0;
+ // TODO(ajm): Only intended for internal use. Make private and friend the
+ // necessary classes?
+ virtual int proc_sample_rate_hz() const = 0;
+ virtual int proc_split_sample_rate_hz() const = 0;
virtual int num_input_channels() const = 0;
virtual int num_output_channels() const = 0;
-
- // Sets the number of channels for the reverse audio stream. Input frames must
- // contain a number of channels given by |channels|.
- virtual int set_num_reverse_channels(int channels) = 0;
virtual int num_reverse_channels() const = 0;
+ // Set to true when the output of AudioProcessing will be muted or in some
+ // other way not used. Ideally, the captured audio would still be processed,
+ // but some components may change behavior based on this information.
+ // Default false.
+ virtual void set_output_will_be_muted(bool muted) = 0;
+ virtual bool output_will_be_muted() const = 0;
+
// Processes a 10 ms |frame| of the primary audio stream. On the client-side,
// this is the near-end (or captured) audio.
//
@@ -184,10 +236,25 @@ class AudioProcessing : public Module {
// with the stream_ tag which is needed should be called after processing.
//
// The |sample_rate_hz_|, |num_channels_|, and |samples_per_channel_|
- // members of |frame| must be valid, and correspond to settings supplied
- // to APM.
+ // members of |frame| must be valid. If changed from the previous call to this
+ // method, it will trigger an initialization.
virtual int ProcessStream(AudioFrame* frame) = 0;
+ // Accepts deinterleaved float audio with the range [-1, 1]. Each element
+ // of |src| points to a channel buffer, arranged according to
+ // |input_layout|. At output, the channels will be arranged according to
+ // |output_layout| at |output_sample_rate_hz| in |dest|.
+ //
+ // The output layout may only remove channels, not add. |src| and |dest|
+ // may use the same memory, if desired.
+ virtual int ProcessStream(const float* const* src,
+ int samples_per_channel,
+ int input_sample_rate_hz,
+ ChannelLayout input_layout,
+ int output_sample_rate_hz,
+ ChannelLayout output_layout,
+ float* const* dest) = 0;
+
// Analyzes a 10 ms |frame| of the reverse direction audio stream. The frame
// will not be modified. On the client-side, this is the far-end (or to be
// rendered) audio.
@@ -199,11 +266,19 @@ class AudioProcessing : public Module {
// chances are you don't need to use it.
//
// The |sample_rate_hz_|, |num_channels_|, and |samples_per_channel_|
- // members of |frame| must be valid.
+ // members of |frame| must be valid. |sample_rate_hz_| must correspond to
+ // |input_sample_rate_hz()|
//
// TODO(ajm): add const to input; requires an implementation fix.
virtual int AnalyzeReverseStream(AudioFrame* frame) = 0;
+ // Accepts deinterleaved float audio with the range [-1, 1]. Each element
+ // of |data| points to a channel buffer, arranged according to |layout|.
+ virtual int AnalyzeReverseStream(const float* const* data,
+ int samples_per_channel,
+ int sample_rate_hz,
+ ChannelLayout layout) = 0;
+
// This must be called if and only if echo processing is enabled.
//
// Sets the |delay| in ms between AnalyzeReverseStream() receiving a far-end
@@ -219,6 +294,12 @@ class AudioProcessing : public Module {
// ProcessStream().
virtual int set_stream_delay_ms(int delay) = 0;
virtual int stream_delay_ms() const = 0;
+ virtual bool was_stream_delay_set() const = 0;
+
+ // Call to signal that a key press occurred (true) or did not occur (false)
+ // with this chunk of audio.
+ virtual void set_stream_key_pressed(bool key_pressed) = 0;
+ virtual bool stream_key_pressed() const = 0;
// Sets a delay |offset| in ms to add to the values passed in through
// set_stream_delay_ms(). May be positive or negative.
@@ -283,9 +364,13 @@ class AudioProcessing : public Module {
kBadStreamParameterWarning = -13
};
- // Inherited from Module.
- virtual int32_t TimeUntilNextProcess() OVERRIDE;
- virtual int32_t Process() OVERRIDE;
+ enum NativeRate {
+ kSampleRate8kHz = 8000,
+ kSampleRate16kHz = 16000,
+ kSampleRate32kHz = 32000
+ };
+
+ static const int kChunkSizeMs = 10;
};
// The acoustic echo cancellation (AEC) component provides better performance
@@ -306,16 +391,10 @@ class EchoCancellation {
// render and capture devices are used, particularly with webcams.
//
// This enables a compensation mechanism, and requires that
- // |set_device_sample_rate_hz()| and |set_stream_drift_samples()| be called.
+ // set_stream_drift_samples() be called.
virtual int enable_drift_compensation(bool enable) = 0;
virtual bool is_drift_compensation_enabled() const = 0;
- // Provides the sampling rate of the audio devices. It is assumed the render
- // and capture devices use the same nominal sample rate. Required if and only
- // if drift compensation is enabled.
- virtual int set_device_sample_rate_hz(int rate) = 0;
- virtual int device_sample_rate_hz() const = 0;
-
// Sets the difference between the number of samples rendered and captured by
// the audio devices since the last call to |ProcessStream()|. Must be called
// if drift compensation is enabled, prior to |ProcessStream()|.
@@ -555,8 +634,7 @@ class LevelEstimator {
// frames since the last call to RMS(). The returned value is positive but
// should be interpreted as negative. It is constrained to [0, 127].
//
- // The computation follows:
- // http://tools.ietf.org/html/draft-ietf-avtext-client-to-mixer-audio-level-05
+ // The computation follows: https://tools.ietf.org/html/rfc6465
// with the intent that it can provide the RTP audio level indication.
//
// Frames passed to ProcessStream() with an |_energy| of zero are considered
diff --git a/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h b/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h
index 46520ab494e..c1ac23adf76 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/include/mock_audio_processing.h
@@ -26,10 +26,6 @@ class MockEchoCancellation : public EchoCancellation {
int(bool enable));
MOCK_CONST_METHOD0(is_drift_compensation_enabled,
bool());
- MOCK_METHOD1(set_device_sample_rate_hz,
- int(int rate));
- MOCK_CONST_METHOD0(device_sample_rate_hz,
- int());
MOCK_METHOD1(set_stream_drift_samples,
void(int drift));
MOCK_CONST_METHOD0(stream_drift_samples,
@@ -181,6 +177,13 @@ class MockAudioProcessing : public AudioProcessing {
MOCK_METHOD0(Initialize,
int());
+ MOCK_METHOD6(Initialize,
+ int(int sample_rate_hz,
+ int output_sample_rate_hz,
+ int reverse_sample_rate_hz,
+ ChannelLayout input_layout,
+ ChannelLayout output_layout,
+ ChannelLayout reverse_layout));
MOCK_METHOD1(SetExtraOptions,
void(const Config& config));
MOCK_METHOD1(EnableExperimentalNs,
@@ -189,26 +192,49 @@ class MockAudioProcessing : public AudioProcessing {
bool());
MOCK_METHOD1(set_sample_rate_hz,
int(int rate));
+ MOCK_CONST_METHOD0(input_sample_rate_hz,
+ int());
MOCK_CONST_METHOD0(sample_rate_hz,
int());
- MOCK_METHOD2(set_num_channels,
- int(int input_channels, int output_channels));
+ MOCK_CONST_METHOD0(proc_sample_rate_hz,
+ int());
+ MOCK_CONST_METHOD0(proc_split_sample_rate_hz,
+ int());
MOCK_CONST_METHOD0(num_input_channels,
int());
MOCK_CONST_METHOD0(num_output_channels,
int());
- MOCK_METHOD1(set_num_reverse_channels,
- int(int channels));
MOCK_CONST_METHOD0(num_reverse_channels,
int());
+ MOCK_METHOD1(set_output_will_be_muted,
+ void(bool muted));
+ MOCK_CONST_METHOD0(output_will_be_muted,
+ bool());
MOCK_METHOD1(ProcessStream,
int(AudioFrame* frame));
+ MOCK_METHOD7(ProcessStream,
+ int(const float* const* src,
+ int samples_per_channel,
+ int input_sample_rate_hz,
+ ChannelLayout input_layout,
+ int output_sample_rate_hz,
+ ChannelLayout output_layout,
+ float* const* dest));
MOCK_METHOD1(AnalyzeReverseStream,
int(AudioFrame* frame));
+ MOCK_METHOD4(AnalyzeReverseStream,
+ int(const float* const* data, int frames, int sample_rate_hz,
+ ChannelLayout input_layout));
MOCK_METHOD1(set_stream_delay_ms,
int(int delay));
MOCK_CONST_METHOD0(stream_delay_ms,
int());
+ MOCK_CONST_METHOD0(was_stream_delay_set,
+ bool());
+ MOCK_METHOD1(set_stream_key_pressed,
+ void(bool key_pressed));
+ MOCK_CONST_METHOD0(stream_key_pressed,
+ bool());
MOCK_METHOD1(set_delay_offset_ms,
void(int offset));
MOCK_CONST_METHOD0(delay_offset_ms,
@@ -230,20 +256,16 @@ class MockAudioProcessing : public AudioProcessing {
}
virtual MockHighPassFilter* high_pass_filter() const {
return high_pass_filter_.get();
- };
+ }
virtual MockLevelEstimator* level_estimator() const {
return level_estimator_.get();
- };
+ }
virtual MockNoiseSuppression* noise_suppression() const {
return noise_suppression_.get();
- };
+ }
virtual MockVoiceDetection* voice_detection() const {
return voice_detection_.get();
- };
- MOCK_METHOD0(TimeUntilNextProcess,
- int32_t());
- MOCK_METHOD0(Process,
- int32_t());
+ }
private:
scoped_ptr<MockEchoCancellation> echo_cancellation_;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.cc
index 29dbdfc78e0..cfe295a6a0b 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.cc
@@ -10,110 +10,35 @@
#include "webrtc/modules/audio_processing/level_estimator_impl.h"
-#include <assert.h>
-#include <math.h>
-#include <string.h>
-
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/modules/audio_processing/rms_level.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
namespace webrtc {
-namespace {
-
-const double kMaxSquaredLevel = 32768.0 * 32768.0;
-
-class Level {
- public:
- static const int kMinLevel = 127;
-
- Level()
- : sum_square_(0.0),
- sample_count_(0) {}
- ~Level() {}
-
- void Init() {
- sum_square_ = 0.0;
- sample_count_ = 0;
- }
-
- void Process(int16_t* data, int length) {
- assert(data != NULL);
- assert(length > 0);
- sum_square_ += SumSquare(data, length);
- sample_count_ += length;
- }
-
- void ProcessMuted(int length) {
- assert(length > 0);
- sample_count_ += length;
- }
- int RMS() {
- if (sample_count_ == 0 || sum_square_ == 0.0) {
- Init();
- return kMinLevel;
- }
-
- // Normalize by the max level.
- double rms = sum_square_ / (sample_count_ * kMaxSquaredLevel);
- // 20log_10(x^0.5) = 10log_10(x)
- rms = 10 * log10(rms);
- if (rms > 0)
- rms = 0;
- else if (rms < -kMinLevel)
- rms = -kMinLevel;
-
- rms = -rms;
- Init();
- return static_cast<int>(rms + 0.5);
- }
-
- private:
- static double SumSquare(int16_t* data, int length) {
- double sum_square = 0.0;
- for (int i = 0; i < length; ++i) {
- double data_d = static_cast<double>(data[i]);
- sum_square += data_d * data_d;
- }
- return sum_square;
- }
-
- double sum_square_;
- int sample_count_;
-};
-} // namespace
-
-LevelEstimatorImpl::LevelEstimatorImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
- apm_(apm) {}
+LevelEstimatorImpl::LevelEstimatorImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
+ crit_(crit) {}
LevelEstimatorImpl::~LevelEstimatorImpl() {}
int LevelEstimatorImpl::ProcessStream(AudioBuffer* audio) {
if (!is_component_enabled()) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
- Level* level = static_cast<Level*>(handle(0));
- if (audio->is_muted()) {
- level->ProcessMuted(audio->samples_per_channel());
- return apm_->kNoError;
+ RMSLevel* rms_level = static_cast<RMSLevel*>(handle(0));
+ for (int i = 0; i < audio->num_channels(); ++i) {
+ rms_level->Process(audio->data(i), audio->samples_per_channel());
}
- int16_t* mixed_data = audio->data(0);
- if (audio->num_channels() > 1) {
- audio->CopyAndMix(1);
- mixed_data = audio->mixed_data(0);
- }
-
- level->Process(mixed_data, audio->samples_per_channel());
-
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int LevelEstimatorImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
return EnableComponent(enable);
}
@@ -123,43 +48,38 @@ bool LevelEstimatorImpl::is_enabled() const {
int LevelEstimatorImpl::RMS() {
if (!is_component_enabled()) {
- return apm_->kNotEnabledError;
+ return AudioProcessing::kNotEnabledError;
}
- Level* level = static_cast<Level*>(handle(0));
- return level->RMS();
+ RMSLevel* rms_level = static_cast<RMSLevel*>(handle(0));
+ return rms_level->RMS();
}
+// The ProcessingComponent implementation is pretty weird in this class since
+// we have only a single instance of the trivial underlying component.
void* LevelEstimatorImpl::CreateHandle() const {
- return new Level;
+ return new RMSLevel;
}
-int LevelEstimatorImpl::DestroyHandle(void* handle) const {
- assert(handle != NULL);
- Level* level = static_cast<Level*>(handle);
- delete level;
- return apm_->kNoError;
+void LevelEstimatorImpl::DestroyHandle(void* handle) const {
+ delete static_cast<RMSLevel*>(handle);
}
int LevelEstimatorImpl::InitializeHandle(void* handle) const {
- assert(handle != NULL);
- Level* level = static_cast<Level*>(handle);
- level->Init();
-
- return apm_->kNoError;
+ static_cast<RMSLevel*>(handle)->Reset();
+ return AudioProcessing::kNoError;
}
int LevelEstimatorImpl::ConfigureHandle(void* /*handle*/) const {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int LevelEstimatorImpl::num_handles_required() const {
return 1;
}
-int LevelEstimatorImpl::GetHandleError(void* handle) const {
- // The component has no detailed errors.
- assert(handle != NULL);
- return apm_->kUnspecifiedError;
+int LevelEstimatorImpl::GetHandleError(void* /*handle*/) const {
+ return AudioProcessing::kUnspecifiedError;
}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.h b/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.h
index 20dc18dc425..b38337d4d41 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/level_estimator_impl.h
@@ -13,15 +13,18 @@
#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/modules/audio_processing/processing_component.h"
+#include "webrtc/modules/audio_processing/rms_level.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class LevelEstimatorImpl : public LevelEstimator,
public ProcessingComponent {
public:
- explicit LevelEstimatorImpl(const AudioProcessingImpl* apm);
+ LevelEstimatorImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit);
virtual ~LevelEstimatorImpl();
int ProcessStream(AudioBuffer* audio);
@@ -38,12 +41,13 @@ class LevelEstimatorImpl : public LevelEstimator,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ CriticalSectionWrapper* crit_;
};
+
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_LEVEL_ESTIMATOR_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/lib_core_neon_offsets.gypi b/chromium/third_party/webrtc/modules/audio_processing/lib_core_neon_offsets.gypi
new file mode 100644
index 00000000000..f32ddd47f78
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/lib_core_neon_offsets.gypi
@@ -0,0 +1,51 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file has common information for gen_core_neon_offsets.gyp
+# and gen_core_neon_offsets_chromium.gyp
+{
+ 'variables': {
+ 'variables' : {
+ 'lib_intermediate_name': '',
+ 'conditions' : [
+ ['android_webview_build==1', {
+ 'lib_intermediate_name' : '$(abspath $(call intermediates-dir-for,STATIC_LIBRARIES,lib_core_neon_offsets,,,$(gyp_var_prefix)))/lib_core_neon_offsets.a',
+ }],
+ ],
+ },
+ 'shared_generated_dir': '<(SHARED_INTERMEDIATE_DIR)/audio_processing/asm_offsets',
+ 'output_dir': '<(shared_generated_dir)',
+ 'output_format': 'cheader',
+ 'unpack_lib_search_path_list': [
+ '-a', '<(PRODUCT_DIR)/lib_core_neon_offsets.a',
+ '-a', '<(LIB_DIR)/webrtc/modules/audio_processing/lib_core_neon_offsets.a',
+ '-a', '<(LIB_DIR)/third_party/webrtc/modules/audio_processing/lib_core_neon_offsets.a',
+ '-a', '<(lib_intermediate_name)',
+ ],
+ 'unpack_lib_output_dir':'<(shared_generated_dir)',
+ },
+ 'includes': [
+ '../../build/common.gypi',
+ ],
+ 'conditions': [
+ ['((target_arch=="arm" and arm_version==7) or target_arch=="armv7") and (OS=="android" or OS=="ios")', {
+ 'targets' : [
+ {
+ 'target_name': 'lib_core_neon_offsets',
+ 'type': 'static_library',
+ 'android_unmangled_name': 1,
+ 'hard_dependency': 1,
+ 'sources': [
+ 'ns/nsx_core_neon_offsets.c',
+ 'aecm/aecm_core_neon_offsets.c',
+ ],
+ },
+ ],
+ }],
+ ],
+}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
index 41c11b1cd72..eea0a04a2a6 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.cc
@@ -12,15 +12,14 @@
#include <assert.h>
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/modules/audio_processing/audio_buffer.h"
#if defined(WEBRTC_NS_FLOAT)
#include "webrtc/modules/audio_processing/ns/include/noise_suppression.h"
#elif defined(WEBRTC_NS_FIXED)
#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h"
#endif
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
namespace webrtc {
@@ -47,9 +46,11 @@ int MapSetting(NoiseSuppression::Level level) {
}
} // namespace
-NoiseSuppressionImpl::NoiseSuppressionImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
+NoiseSuppressionImpl::NoiseSuppressionImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
apm_(apm),
+ crit_(crit),
level_(kModerate) {}
NoiseSuppressionImpl::~NoiseSuppressionImpl() {}
@@ -67,10 +68,10 @@ int NoiseSuppressionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
Handle* my_handle = static_cast<Handle*>(handle(i));
#if defined(WEBRTC_NS_FLOAT)
err = WebRtcNs_Process(static_cast<Handle*>(handle(i)),
- audio->low_pass_split_data(i),
- audio->high_pass_split_data(i),
- audio->low_pass_split_data(i),
- audio->high_pass_split_data(i));
+ audio->low_pass_split_data_f(i),
+ audio->high_pass_split_data_f(i),
+ audio->low_pass_split_data_f(i),
+ audio->high_pass_split_data_f(i));
#elif defined(WEBRTC_NS_FIXED)
err = WebRtcNsx_Process(static_cast<Handle*>(handle(i)),
audio->low_pass_split_data(i),
@@ -88,7 +89,7 @@ int NoiseSuppressionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
int NoiseSuppressionImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
return EnableComponent(enable);
}
@@ -97,7 +98,7 @@ bool NoiseSuppressionImpl::is_enabled() const {
}
int NoiseSuppressionImpl::set_level(Level level) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (MapSetting(level) == -1) {
return apm_->kBadParameterError;
}
@@ -140,19 +141,21 @@ void* NoiseSuppressionImpl::CreateHandle() const {
return handle;
}
-int NoiseSuppressionImpl::DestroyHandle(void* handle) const {
+void NoiseSuppressionImpl::DestroyHandle(void* handle) const {
#if defined(WEBRTC_NS_FLOAT)
- return WebRtcNs_Free(static_cast<Handle*>(handle));
+ WebRtcNs_Free(static_cast<Handle*>(handle));
#elif defined(WEBRTC_NS_FIXED)
- return WebRtcNsx_Free(static_cast<Handle*>(handle));
+ WebRtcNsx_Free(static_cast<Handle*>(handle));
#endif
}
int NoiseSuppressionImpl::InitializeHandle(void* handle) const {
#if defined(WEBRTC_NS_FLOAT)
- return WebRtcNs_Init(static_cast<Handle*>(handle), apm_->sample_rate_hz());
+ return WebRtcNs_Init(static_cast<Handle*>(handle),
+ apm_->proc_sample_rate_hz());
#elif defined(WEBRTC_NS_FIXED)
- return WebRtcNsx_Init(static_cast<Handle*>(handle), apm_->sample_rate_hz());
+ return WebRtcNsx_Init(static_cast<Handle*>(handle),
+ apm_->proc_sample_rate_hz());
#endif
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.h b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.h
index f6dd8cbd78a..cadbbd9cd4c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/noise_suppression_impl.h
@@ -15,13 +15,15 @@
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class NoiseSuppressionImpl : public NoiseSuppression,
public ProcessingComponent {
public:
- explicit NoiseSuppressionImpl(const AudioProcessingImpl* apm);
+ NoiseSuppressionImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit);
virtual ~NoiseSuppressionImpl();
int ProcessCaptureAudio(AudioBuffer* audio);
@@ -40,13 +42,15 @@ class NoiseSuppressionImpl : public NoiseSuppression,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
Level level_;
};
+
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NOISE_SUPPRESSION_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/include/noise_suppression.h b/chromium/third_party/webrtc/modules/audio_processing/ns/include/noise_suppression.h
index 32b18038089..3cf889e2d07 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/include/noise_suppression.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/include/noise_suppression.h
@@ -99,10 +99,10 @@ int WebRtcNs_set_policy(NsHandle* NS_inst, int mode);
* -1 - Error
*/
int WebRtcNs_Process(NsHandle* NS_inst,
- short* spframe,
- short* spframe_H,
- short* outframe,
- short* outframe_H);
+ float* spframe,
+ float* spframe_H,
+ float* outframe,
+ float* outframe_H);
/* Returns the internally used prior speech probability of the current frame.
* There is a frequency bin based one as well, with which this should not be
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/noise_suppression.c b/chromium/third_party/webrtc/modules/audio_processing/ns/noise_suppression.c
index 848467f080c..075ab88c1c6 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/noise_suppression.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/noise_suppression.c
@@ -43,8 +43,8 @@ int WebRtcNs_set_policy(NsHandle* NS_inst, int mode) {
}
-int WebRtcNs_Process(NsHandle* NS_inst, short* spframe, short* spframe_H,
- short* outframe, short* outframe_H) {
+int WebRtcNs_Process(NsHandle* NS_inst, float* spframe, float* spframe_H,
+ float* outframe, float* outframe_H) {
return WebRtcNs_ProcessCore(
(NSinst_t*) NS_inst, spframe, spframe_H, outframe, outframe_H);
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.c b/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.c
index 124a66d8df5..ec267ae0f69 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.c
@@ -715,10 +715,10 @@ void WebRtcNs_SpeechNoiseProb(NSinst_t* inst, float* probSpeechFinal, float* snr
}
int WebRtcNs_ProcessCore(NSinst_t* inst,
- short* speechFrame,
- short* speechFrameHB,
- short* outFrame,
- short* outFrameHB) {
+ float* speechFrame,
+ float* speechFrameHB,
+ float* outFrame,
+ float* outFrameHB) {
// main routine for noise reduction
int flagHB = 0;
@@ -731,8 +731,8 @@ int WebRtcNs_ProcessCore(NSinst_t* inst,
float snrPrior, currentEstimateStsa;
float tmpFloat1, tmpFloat2, tmpFloat3, probSpeech, probNonSpeech;
float gammaNoiseTmp, gammaNoiseOld;
- float noiseUpdateTmp, fTmp, dTmp;
- float fin[BLOCKL_MAX], fout[BLOCKL_MAX];
+ float noiseUpdateTmp, fTmp;
+ float fout[BLOCKL_MAX];
float winData[ANAL_BLOCKL_MAX];
float magn[HALF_ANAL_BLOCKL], noise[HALF_ANAL_BLOCKL];
float theFilter[HALF_ANAL_BLOCKL], theFilterTmp[HALF_ANAL_BLOCKL];
@@ -775,26 +775,17 @@ int WebRtcNs_ProcessCore(NSinst_t* inst,
updateParsFlag = inst->modelUpdatePars[0];
//
- //for LB do all processing
- // convert to float
- for (i = 0; i < inst->blockLen10ms; i++) {
- fin[i] = (float)speechFrame[i];
- }
// update analysis buffer for L band
memcpy(inst->dataBuf, inst->dataBuf + inst->blockLen10ms,
sizeof(float) * (inst->anaLen - inst->blockLen10ms));
- memcpy(inst->dataBuf + inst->anaLen - inst->blockLen10ms, fin,
+ memcpy(inst->dataBuf + inst->anaLen - inst->blockLen10ms, speechFrame,
sizeof(float) * inst->blockLen10ms);
if (flagHB == 1) {
- // convert to float
- for (i = 0; i < inst->blockLen10ms; i++) {
- fin[i] = (float)speechFrameHB[i];
- }
// update analysis buffer for H band
memcpy(inst->dataBufHB, inst->dataBufHB + inst->blockLen10ms,
sizeof(float) * (inst->anaLen - inst->blockLen10ms));
- memcpy(inst->dataBufHB + inst->anaLen - inst->blockLen10ms, fin,
+ memcpy(inst->dataBufHB + inst->anaLen - inst->blockLen10ms, speechFrameHB,
sizeof(float) * inst->blockLen10ms);
}
@@ -833,30 +824,16 @@ int WebRtcNs_ProcessCore(NSinst_t* inst,
inst->outBuf[i] = fout[i + inst->blockLen10ms];
}
}
- // convert to short
- for (i = 0; i < inst->blockLen10ms; i++) {
- dTmp = fout[i];
- if (dTmp < WEBRTC_SPL_WORD16_MIN) {
- dTmp = WEBRTC_SPL_WORD16_MIN;
- } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
- dTmp = WEBRTC_SPL_WORD16_MAX;
- }
- outFrame[i] = (short)dTmp;
- }
+ for (i = 0; i < inst->blockLen10ms; ++i)
+ outFrame[i] = WEBRTC_SPL_SAT(
+ WEBRTC_SPL_WORD16_MAX, fout[i], WEBRTC_SPL_WORD16_MIN);
// for time-domain gain of HB
- if (flagHB == 1) {
- for (i = 0; i < inst->blockLen10ms; i++) {
- dTmp = inst->dataBufHB[i];
- if (dTmp < WEBRTC_SPL_WORD16_MIN) {
- dTmp = WEBRTC_SPL_WORD16_MIN;
- } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
- dTmp = WEBRTC_SPL_WORD16_MAX;
- }
- outFrameHB[i] = (short)dTmp;
- }
- } // end of H band gain computation
- //
+ if (flagHB == 1)
+ for (i = 0; i < inst->blockLen10ms; ++i)
+ outFrameHB[i] = WEBRTC_SPL_SAT(
+ WEBRTC_SPL_WORD16_MAX, inst->dataBufHB[i], WEBRTC_SPL_WORD16_MIN);
+
return 0;
}
@@ -1239,16 +1216,9 @@ int WebRtcNs_ProcessCore(NSinst_t* inst,
inst->outLen -= inst->blockLen10ms;
}
- // convert to short
- for (i = 0; i < inst->blockLen10ms; i++) {
- dTmp = fout[i];
- if (dTmp < WEBRTC_SPL_WORD16_MIN) {
- dTmp = WEBRTC_SPL_WORD16_MIN;
- } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
- dTmp = WEBRTC_SPL_WORD16_MAX;
- }
- outFrame[i] = (short)dTmp;
- }
+ for (i = 0; i < inst->blockLen10ms; ++i)
+ outFrame[i] = WEBRTC_SPL_SAT(
+ WEBRTC_SPL_WORD16_MAX, fout[i], WEBRTC_SPL_WORD16_MIN);
// for time-domain gain of HB
if (flagHB == 1) {
@@ -1289,13 +1259,9 @@ int WebRtcNs_ProcessCore(NSinst_t* inst,
}
//apply gain
for (i = 0; i < inst->blockLen10ms; i++) {
- dTmp = gainTimeDomainHB * inst->dataBufHB[i];
- if (dTmp < WEBRTC_SPL_WORD16_MIN) {
- dTmp = WEBRTC_SPL_WORD16_MIN;
- } else if (dTmp > WEBRTC_SPL_WORD16_MAX) {
- dTmp = WEBRTC_SPL_WORD16_MAX;
- }
- outFrameHB[i] = (short)dTmp;
+ float o = gainTimeDomainHB * inst->dataBufHB[i];
+ outFrameHB[i] = WEBRTC_SPL_SAT(
+ WEBRTC_SPL_WORD16_MAX, o, WEBRTC_SPL_WORD16_MIN);
}
} // end of H band gain computation
//
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.h b/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.h
index 50daa137cf8..785239ebdac 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/ns_core.h
@@ -167,10 +167,10 @@ int WebRtcNs_set_policy_core(NSinst_t* inst, int mode);
int WebRtcNs_ProcessCore(NSinst_t* inst,
- short* inFrameLow,
- short* inFrameHigh,
- short* outFrameLow,
- short* outFrameHigh);
+ float* inFrameLow,
+ float* inFrameHigh,
+ float* outFrameLow,
+ float* outFrameHigh);
#ifdef __cplusplus
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
index c7229579f4c..2c8270f568c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.c
@@ -70,11 +70,6 @@ static const int16_t WebRtcNsx_kLogTableFrac[256] = {
// Skip first frequency bins during estimation. (0 <= value < 64)
static const int kStartBand = 5;
-static const int16_t kIndicatorTable[17] = {
- 0, 2017, 3809, 5227, 6258, 6963, 7424, 7718,
- 7901, 8014, 8084, 8126, 8152, 8168, 8177, 8183, 8187
-};
-
// hybrib Hanning & flat window
static const int16_t kBlocks80w128x[128] = {
0, 536, 1072, 1606, 2139, 2669, 3196, 3720, 4240, 4756, 5266,
@@ -481,7 +476,7 @@ static void PrepareSpectrumC(NsxInst_t* inst, int16_t* freq_buf) {
}
// Denormalize the real-valued signal |in|, the output from inverse FFT.
-static __inline void Denormalize(NsxInst_t* inst, int16_t* in, int factor) {
+static void DenormalizeC(NsxInst_t* inst, int16_t* in, int factor) {
int i = 0;
int32_t tmp32 = 0;
for (i = 0; i < inst->anaLen; i += 1) {
@@ -546,9 +541,9 @@ static void AnalysisUpdateC(NsxInst_t* inst,
}
// Normalize the real-valued signal |in|, the input to forward FFT.
-static __inline void NormalizeRealBuffer(NsxInst_t* inst,
- const int16_t* in,
- int16_t* out) {
+static void NormalizeRealBufferC(NsxInst_t* inst,
+ const int16_t* in,
+ int16_t* out) {
int i = 0;
for (i = 0; i < inst->anaLen; ++i) {
out[i] = WEBRTC_SPL_LSHIFT_W16(in[i], inst->normData); // Q(normData)
@@ -560,6 +555,8 @@ NoiseEstimation WebRtcNsx_NoiseEstimation;
PrepareSpectrum WebRtcNsx_PrepareSpectrum;
SynthesisUpdate WebRtcNsx_SynthesisUpdate;
AnalysisUpdate WebRtcNsx_AnalysisUpdate;
+Denormalize WebRtcNsx_Denormalize;
+NormalizeRealBuffer WebRtcNsx_NormalizeRealBuffer;
#if (defined WEBRTC_DETECT_ARM_NEON || defined WEBRTC_ARCH_ARM_NEON)
// Initialize function pointers for ARM Neon platform.
@@ -571,6 +568,19 @@ static void WebRtcNsx_InitNeon(void) {
}
#endif
+#if defined(MIPS32_LE)
+// Initialize function pointers for MIPS platform.
+static void WebRtcNsx_InitMips(void) {
+ WebRtcNsx_PrepareSpectrum = WebRtcNsx_PrepareSpectrum_mips;
+ WebRtcNsx_SynthesisUpdate = WebRtcNsx_SynthesisUpdate_mips;
+ WebRtcNsx_AnalysisUpdate = WebRtcNsx_AnalysisUpdate_mips;
+ WebRtcNsx_NormalizeRealBuffer = WebRtcNsx_NormalizeRealBuffer_mips;
+#if defined(MIPS_DSP_R1_LE)
+ WebRtcNsx_Denormalize = WebRtcNsx_Denormalize_mips;
+#endif
+}
+#endif
+
void WebRtcNsx_CalcParametricNoiseEstimate(NsxInst_t* inst,
int16_t pink_noise_exp_avg,
int32_t pink_noise_num_avg,
@@ -758,6 +768,8 @@ int32_t WebRtcNsx_InitCore(NsxInst_t* inst, uint32_t fs) {
WebRtcNsx_PrepareSpectrum = PrepareSpectrumC;
WebRtcNsx_SynthesisUpdate = SynthesisUpdateC;
WebRtcNsx_AnalysisUpdate = AnalysisUpdateC;
+ WebRtcNsx_Denormalize = DenormalizeC;
+ WebRtcNsx_NormalizeRealBuffer = NormalizeRealBufferC;
#ifdef WEBRTC_DETECT_ARM_NEON
uint64_t features = WebRtc_GetCPUFeaturesARM();
@@ -768,6 +780,10 @@ int32_t WebRtcNsx_InitCore(NsxInst_t* inst, uint32_t fs) {
WebRtcNsx_InitNeon();
#endif
+#if defined(MIPS32_LE)
+ WebRtcNsx_InitMips();
+#endif
+
inst->initFlag = 1;
return 0;
@@ -1169,239 +1185,6 @@ void WebRtcNsx_ComputeSpectralDifference(NsxInst_t* inst, uint16_t* magnIn) {
}
}
-// Compute speech/noise probability
-// speech/noise probability is returned in: probSpeechFinal
-//snrLocPrior is the prior SNR for each frequency (in Q11)
-//snrLocPost is the post SNR for each frequency (in Q11)
-void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst, uint16_t* nonSpeechProbFinal,
- uint32_t* priorLocSnr, uint32_t* postLocSnr) {
- uint32_t zeros, num, den, tmpU32no1, tmpU32no2, tmpU32no3;
-
- int32_t invLrtFX, indPriorFX, tmp32, tmp32no1, tmp32no2, besselTmpFX32;
- int32_t frac32, logTmp;
- int32_t logLrtTimeAvgKsumFX;
-
- int16_t indPriorFX16;
- int16_t tmp16, tmp16no1, tmp16no2, tmpIndFX, tableIndex, frac, intPart;
-
- int i, normTmp, normTmp2, nShifts;
-
- // compute feature based on average LR factor
- // this is the average over all frequencies of the smooth log LRT
- logLrtTimeAvgKsumFX = 0;
- for (i = 0; i < inst->magnLen; i++) {
- besselTmpFX32 = (int32_t)postLocSnr[i]; // Q11
- normTmp = WebRtcSpl_NormU32(postLocSnr[i]);
- num = WEBRTC_SPL_LSHIFT_U32(postLocSnr[i], normTmp); // Q(11+normTmp)
- if (normTmp > 10) {
- den = WEBRTC_SPL_LSHIFT_U32(priorLocSnr[i], normTmp - 11); // Q(normTmp)
- } else {
- den = WEBRTC_SPL_RSHIFT_U32(priorLocSnr[i], 11 - normTmp); // Q(normTmp)
- }
- if (den > 0) {
- besselTmpFX32 -= WEBRTC_SPL_UDIV(num, den); // Q11
- } else {
- besselTmpFX32 -= num; // Q11
- }
-
- // inst->logLrtTimeAvg[i] += LRT_TAVG * (besselTmp - log(snrLocPrior) - inst->logLrtTimeAvg[i]);
- // Here, LRT_TAVG = 0.5
- zeros = WebRtcSpl_NormU32(priorLocSnr[i]);
- frac32 = (int32_t)(((priorLocSnr[i] << zeros) & 0x7FFFFFFF) >> 19);
- tmp32 = WEBRTC_SPL_MUL(frac32, frac32);
- tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(tmp32, -43), 19);
- tmp32 += WEBRTC_SPL_MUL_16_16_RSFT((int16_t)frac32, 5412, 12);
- frac32 = tmp32 + 37;
- // tmp32 = log2(priorLocSnr[i])
- tmp32 = (int32_t)(((31 - zeros) << 12) + frac32) - (11 << 12); // Q12
- logTmp = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(tmp32, 178), 8); // log2(priorLocSnr[i])*log(2)
- tmp32no1 = WEBRTC_SPL_RSHIFT_W32(logTmp + inst->logLrtTimeAvgW32[i], 1); // Q12
- inst->logLrtTimeAvgW32[i] += (besselTmpFX32 - tmp32no1); // Q12
-
- logLrtTimeAvgKsumFX += inst->logLrtTimeAvgW32[i]; // Q12
- }
- inst->featureLogLrt = WEBRTC_SPL_RSHIFT_W32(logLrtTimeAvgKsumFX * 5, inst->stages + 10); // 5 = BIN_SIZE_LRT / 2
- // done with computation of LR factor
-
- //
- //compute the indicator functions
- //
-
- // average LRT feature
- // FLOAT code
- // indicator0 = 0.5 * (tanh(widthPrior * (logLrtTimeAvgKsum - threshPrior0)) + 1.0);
- tmpIndFX = 16384; // Q14(1.0)
- tmp32no1 = logLrtTimeAvgKsumFX - inst->thresholdLogLrt; // Q12
- nShifts = 7 - inst->stages; // WIDTH_PR_MAP_SHIFT - inst->stages + 5;
- //use larger width in tanh map for pause regions
- if (tmp32no1 < 0) {
- tmpIndFX = 0;
- tmp32no1 = -tmp32no1;
- //widthPrior = widthPrior * 2.0;
- nShifts++;
- }
- tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14
- // compute indicator function: sigmoid map
- tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 14);
- if ((tableIndex < 16) && (tableIndex >= 0)) {
- tmp16no2 = kIndicatorTable[tableIndex];
- tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
- frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14
- tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
- if (tmpIndFX == 0) {
- tmpIndFX = 8192 - tmp16no2; // Q14
- } else {
- tmpIndFX = 8192 + tmp16no2; // Q14
- }
- }
- indPriorFX = WEBRTC_SPL_MUL_16_16(inst->weightLogLrt, tmpIndFX); // 6*Q14
-
- //spectral flatness feature
- if (inst->weightSpecFlat) {
- tmpU32no1 = WEBRTC_SPL_UMUL(inst->featureSpecFlat, 400); // Q10
- tmpIndFX = 16384; // Q14(1.0)
- //use larger width in tanh map for pause regions
- tmpU32no2 = inst->thresholdSpecFlat - tmpU32no1; //Q10
- nShifts = 4;
- if (inst->thresholdSpecFlat < tmpU32no1) {
- tmpIndFX = 0;
- tmpU32no2 = tmpU32no1 - inst->thresholdSpecFlat;
- //widthPrior = widthPrior * 2.0;
- nShifts++;
- }
- tmp32no1 = (int32_t)WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2,
- nShifts), 25); //Q14
- tmpU32no1 = WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts), 25); //Q14
- // compute indicator function: sigmoid map
- // FLOAT code
- // indicator1 = 0.5 * (tanh(sgnMap * widthPrior * (threshPrior1 - tmpFloat1)) + 1.0);
- tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
- if (tableIndex < 16) {
- tmp16no2 = kIndicatorTable[tableIndex];
- tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
- frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
- tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
- if (tmpIndFX) {
- tmpIndFX = 8192 + tmp16no2; // Q14
- } else {
- tmpIndFX = 8192 - tmp16no2; // Q14
- }
- }
- indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecFlat, tmpIndFX); // 6*Q14
- }
-
- //for template spectral-difference
- if (inst->weightSpecDiff) {
- tmpU32no1 = 0;
- if (inst->featureSpecDiff) {
- normTmp = WEBRTC_SPL_MIN(20 - inst->stages,
- WebRtcSpl_NormU32(inst->featureSpecDiff));
- tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(inst->featureSpecDiff, normTmp); // Q(normTmp-2*stages)
- tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->timeAvgMagnEnergy, 20 - inst->stages
- - normTmp);
- if (tmpU32no2 > 0) {
- // Q(20 - inst->stages)
- tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2);
- } else {
- tmpU32no1 = (uint32_t)(0x7fffffff);
- }
- }
- tmpU32no3 = WEBRTC_SPL_UDIV(WEBRTC_SPL_LSHIFT_U32(inst->thresholdSpecDiff, 17), 25);
- tmpU32no2 = tmpU32no1 - tmpU32no3;
- nShifts = 1;
- tmpIndFX = 16384; // Q14(1.0)
- //use larger width in tanh map for pause regions
- if (tmpU32no2 & 0x80000000) {
- tmpIndFX = 0;
- tmpU32no2 = tmpU32no3 - tmpU32no1;
- //widthPrior = widthPrior * 2.0;
- nShifts--;
- }
- tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, nShifts);
- // compute indicator function: sigmoid map
- /* FLOAT code
- indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0);
- */
- tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
- if (tableIndex < 16) {
- tmp16no2 = kIndicatorTable[tableIndex];
- tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
- frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
- tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
- tmp16no1, frac, 14);
- if (tmpIndFX) {
- tmpIndFX = 8192 + tmp16no2;
- } else {
- tmpIndFX = 8192 - tmp16no2;
- }
- }
- indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecDiff, tmpIndFX); // 6*Q14
- }
-
- //combine the indicator function with the feature weights
- // FLOAT code
- // indPrior = 1 - (weightIndPrior0 * indicator0 + weightIndPrior1 * indicator1 + weightIndPrior2 * indicator2);
- indPriorFX16 = WebRtcSpl_DivW32W16ResW16(98307 - indPriorFX, 6); // Q14
- // done with computing indicator function
-
- //compute the prior probability
- // FLOAT code
- // inst->priorNonSpeechProb += PRIOR_UPDATE * (indPriorNonSpeech - inst->priorNonSpeechProb);
- tmp16 = indPriorFX16 - inst->priorNonSpeechProb; // Q14
- inst->priorNonSpeechProb += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
- PRIOR_UPDATE_Q14, tmp16, 14); // Q14
-
- //final speech probability: combine prior model with LR factor:
-
- memset(nonSpeechProbFinal, 0, sizeof(uint16_t) * inst->magnLen);
-
- if (inst->priorNonSpeechProb > 0) {
- for (i = 0; i < inst->magnLen; i++) {
- // FLOAT code
- // invLrt = exp(inst->logLrtTimeAvg[i]);
- // invLrt = inst->priorSpeechProb * invLrt;
- // nonSpeechProbFinal[i] = (1.0 - inst->priorSpeechProb) / (1.0 - inst->priorSpeechProb + invLrt);
- // invLrt = (1.0 - inst->priorNonSpeechProb) * invLrt;
- // nonSpeechProbFinal[i] = inst->priorNonSpeechProb / (inst->priorNonSpeechProb + invLrt);
- if (inst->logLrtTimeAvgW32[i] < 65300) {
- tmp32no1 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(inst->logLrtTimeAvgW32[i], 23637),
- 14); // Q12
- intPart = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 12);
- if (intPart < -8) {
- intPart = -8;
- }
- frac = (int16_t)(tmp32no1 & 0x00000fff); // Q12
-
- // Quadratic approximation of 2^frac
- tmp32no2 = WEBRTC_SPL_RSHIFT_W32(frac * frac * 44, 19); // Q12
- tmp32no2 += WEBRTC_SPL_MUL_16_16_RSFT(frac, 84, 7); // Q12
- invLrtFX = WEBRTC_SPL_LSHIFT_W32(1, 8 + intPart)
- + WEBRTC_SPL_SHIFT_W32(tmp32no2, intPart - 4); // Q8
-
- normTmp = WebRtcSpl_NormW32(invLrtFX);
- normTmp2 = WebRtcSpl_NormW16((16384 - inst->priorNonSpeechProb));
- if (normTmp + normTmp2 >= 7) {
- if (normTmp + normTmp2 < 15) {
- invLrtFX = WEBRTC_SPL_RSHIFT_W32(invLrtFX, 15 - normTmp2 - normTmp);
- // Q(normTmp+normTmp2-7)
- tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, (16384 - inst->priorNonSpeechProb));
- // Q(normTmp+normTmp2+7)
- invLrtFX = WEBRTC_SPL_SHIFT_W32(tmp32no1, 7 - normTmp - normTmp2); // Q14
- } else {
- tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX, (16384 - inst->priorNonSpeechProb)); // Q22
- invLrtFX = WEBRTC_SPL_RSHIFT_W32(tmp32no1, 8); // Q14
- }
-
- tmp32no1 = WEBRTC_SPL_LSHIFT_W32((int32_t)inst->priorNonSpeechProb, 8); // Q22
-
- nonSpeechProbFinal[i] = (uint16_t)WEBRTC_SPL_DIV(tmp32no1,
- (int32_t)inst->priorNonSpeechProb + invLrtFX); // Q8
- }
- }
- }
- }
-}
-
// Transform input (speechFrame) to frequency domain magnitude (magnU16)
void WebRtcNsx_DataAnalysis(NsxInst_t* inst, short* speechFrame, uint16_t* magnU16) {
@@ -1461,7 +1244,7 @@ void WebRtcNsx_DataAnalysis(NsxInst_t* inst, short* speechFrame, uint16_t* magnU
right_shifts_in_magnU16 = WEBRTC_SPL_MAX(right_shifts_in_magnU16, 0);
// create realImag as winData interleaved with zeros (= imag. part), normalize it
- NormalizeRealBuffer(inst, winData, realImag);
+ WebRtcNsx_NormalizeRealBuffer(inst, winData, realImag);
// FFT output will be in winData[].
WebRtcSpl_RealForwardFFT(inst->real_fft, realImag, winData);
@@ -1624,9 +1407,9 @@ void WebRtcNsx_DataAnalysis(NsxInst_t* inst, short* speechFrame, uint16_t* magnU
tmpU32no1 = WEBRTC_SPL_RSHIFT_U32((uint32_t)sum_log_i_log_magn, 12); // Q5
// Shift the largest value of sum_log_i and tmp32no3 before multiplication
- tmp_u16 = WEBRTC_SPL_LSHIFT_U16((uint16_t)sum_log_i, 1); // Q6
+ tmp_u16 = ((uint16_t)sum_log_i << 1); // Q6
if ((uint32_t)sum_log_i > tmpU32no1) {
- tmp_u16 = WEBRTC_SPL_RSHIFT_U16(tmp_u16, zeros);
+ tmp_u16 >>= zeros;
} else {
tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no1, zeros);
}
@@ -1693,7 +1476,7 @@ void WebRtcNsx_DataSynthesis(NsxInst_t* inst, short* outFrame) {
// Inverse FFT output will be in rfft_out[].
outCIFFT = WebRtcSpl_RealInverseFFT(inst->real_fft, realImag, rfft_out);
- Denormalize(inst, rfft_out, outCIFFT);
+ WebRtcNsx_Denormalize(inst, rfft_out, outCIFFT);
//scale factor: only do it after END_STARTUP_LONG time
gainFactor = 8192; // 8192 = Q13(1.0)
@@ -2288,8 +2071,8 @@ int WebRtcNsx_ProcessCore(NsxInst_t* inst, short* speechFrame, short* speechFram
tmpU16no1 += nonSpeechProbFinal[i]; // Q8
tmpU32no1 += (uint32_t)(inst->noiseSupFilter[i]); // Q14
}
- avgProbSpeechHB = (int16_t)(4096
- - WEBRTC_SPL_RSHIFT_U16(tmpU16no1, inst->stages - 7)); // Q12
+ assert(inst->stages >= 7);
+ avgProbSpeechHB = (4096 - (tmpU16no1 >> (inst->stages - 7))); // Q12
avgFilterGainHB = (int16_t)WEBRTC_SPL_RSHIFT_U32(
tmpU32no1, inst->stages - 3); // Q14
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
index 1ad369ffbeb..5b3c5e78f4e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core.h
@@ -201,6 +201,23 @@ typedef void (*AnalysisUpdate)(NsxInst_t* inst,
int16_t* new_speech);
extern AnalysisUpdate WebRtcNsx_AnalysisUpdate;
+// Denormalize the real-valued signal |in|, the output from inverse FFT.
+typedef void (*Denormalize) (NsxInst_t* inst, int16_t* in, int factor);
+extern Denormalize WebRtcNsx_Denormalize;
+
+// Normalize the real-valued signal |in|, the input to forward FFT.
+typedef void (*NormalizeRealBuffer) (NsxInst_t* inst,
+ const int16_t* in,
+ int16_t* out);
+extern NormalizeRealBuffer WebRtcNsx_NormalizeRealBuffer;
+
+// Compute speech/noise probability.
+// Intended to be private.
+void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
+ uint16_t* nonSpeechProbFinal,
+ uint32_t* priorLocSnr,
+ uint32_t* postLocSnr);
+
#if (defined WEBRTC_DETECT_ARM_NEON) || defined (WEBRTC_ARCH_ARM_NEON)
// For the above function pointers, functions for generic platforms are declared
// and defined as static in file nsx_core.c, while those for ARM Neon platforms
@@ -218,6 +235,26 @@ void WebRtcNsx_AnalysisUpdateNeon(NsxInst_t* inst,
void WebRtcNsx_PrepareSpectrumNeon(NsxInst_t* inst, int16_t* freq_buff);
#endif
+#if defined(MIPS32_LE)
+// For the above function pointers, functions for generic platforms are declared
+// and defined as static in file nsx_core.c, while those for MIPS platforms
+// are declared below and defined in file nsx_core_mips.c.
+void WebRtcNsx_SynthesisUpdate_mips(NsxInst_t* inst,
+ int16_t* out_frame,
+ int16_t gain_factor);
+void WebRtcNsx_AnalysisUpdate_mips(NsxInst_t* inst,
+ int16_t* out,
+ int16_t* new_speech);
+void WebRtcNsx_PrepareSpectrum_mips(NsxInst_t* inst, int16_t* freq_buff);
+void WebRtcNsx_NormalizeRealBuffer_mips(NsxInst_t* inst,
+ const int16_t* in,
+ int16_t* out);
+#if defined(MIPS_DSP_R1_LE)
+void WebRtcNsx_Denormalize_mips(NsxInst_t* inst, int16_t* in, int factor);
+#endif
+
+#endif
+
#ifdef __cplusplus
}
#endif
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c
new file mode 100644
index 00000000000..452b96e77b0
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_c.c
@@ -0,0 +1,273 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h"
+#include "webrtc/modules/audio_processing/ns/nsx_core.h"
+
+static const int16_t kIndicatorTable[17] = {
+ 0, 2017, 3809, 5227, 6258, 6963, 7424, 7718,
+ 7901, 8014, 8084, 8126, 8152, 8168, 8177, 8183, 8187
+};
+
+// Compute speech/noise probability
+// speech/noise probability is returned in: probSpeechFinal
+//snrLocPrior is the prior SNR for each frequency (in Q11)
+//snrLocPost is the post SNR for each frequency (in Q11)
+void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
+ uint16_t* nonSpeechProbFinal,
+ uint32_t* priorLocSnr,
+ uint32_t* postLocSnr) {
+
+ uint32_t zeros, num, den, tmpU32no1, tmpU32no2, tmpU32no3;
+ int32_t invLrtFX, indPriorFX, tmp32, tmp32no1, tmp32no2, besselTmpFX32;
+ int32_t frac32, logTmp;
+ int32_t logLrtTimeAvgKsumFX;
+ int16_t indPriorFX16;
+ int16_t tmp16, tmp16no1, tmp16no2, tmpIndFX, tableIndex, frac, intPart;
+ int i, normTmp, normTmp2, nShifts;
+
+ // compute feature based on average LR factor
+ // this is the average over all frequencies of the smooth log LRT
+ logLrtTimeAvgKsumFX = 0;
+ for (i = 0; i < inst->magnLen; i++) {
+ besselTmpFX32 = (int32_t)postLocSnr[i]; // Q11
+ normTmp = WebRtcSpl_NormU32(postLocSnr[i]);
+ num = WEBRTC_SPL_LSHIFT_U32(postLocSnr[i], normTmp); // Q(11+normTmp)
+ if (normTmp > 10) {
+ den = WEBRTC_SPL_LSHIFT_U32(priorLocSnr[i], normTmp - 11); // Q(normTmp)
+ } else {
+ den = WEBRTC_SPL_RSHIFT_U32(priorLocSnr[i], 11 - normTmp); // Q(normTmp)
+ }
+ if (den > 0) {
+ besselTmpFX32 -= WEBRTC_SPL_UDIV(num, den); // Q11
+ } else {
+ besselTmpFX32 -= num; // Q11
+ }
+
+ // inst->logLrtTimeAvg[i] += LRT_TAVG * (besselTmp - log(snrLocPrior)
+ // - inst->logLrtTimeAvg[i]);
+ // Here, LRT_TAVG = 0.5
+ zeros = WebRtcSpl_NormU32(priorLocSnr[i]);
+ frac32 = (int32_t)(((priorLocSnr[i] << zeros) & 0x7FFFFFFF) >> 19);
+ tmp32 = WEBRTC_SPL_MUL(frac32, frac32);
+ tmp32 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(tmp32, -43), 19);
+ tmp32 += WEBRTC_SPL_MUL_16_16_RSFT((int16_t)frac32, 5412, 12);
+ frac32 = tmp32 + 37;
+ // tmp32 = log2(priorLocSnr[i])
+ tmp32 = (int32_t)(((31 - zeros) << 12) + frac32) - (11 << 12); // Q12
+ logTmp = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_32_16(tmp32, 178), 8);
+ // log2(priorLocSnr[i])*log(2)
+ tmp32no1 = WEBRTC_SPL_RSHIFT_W32(logTmp + inst->logLrtTimeAvgW32[i], 1);
+ // Q12
+ inst->logLrtTimeAvgW32[i] += (besselTmpFX32 - tmp32no1); // Q12
+
+ logLrtTimeAvgKsumFX += inst->logLrtTimeAvgW32[i]; // Q12
+ }
+ inst->featureLogLrt = WEBRTC_SPL_RSHIFT_W32(logLrtTimeAvgKsumFX * 5,
+ inst->stages + 10);
+ // 5 = BIN_SIZE_LRT / 2
+ // done with computation of LR factor
+
+ //
+ //compute the indicator functions
+ //
+
+ // average LRT feature
+ // FLOAT code
+ // indicator0 = 0.5 * (tanh(widthPrior *
+ // (logLrtTimeAvgKsum - threshPrior0)) + 1.0);
+ tmpIndFX = 16384; // Q14(1.0)
+ tmp32no1 = logLrtTimeAvgKsumFX - inst->thresholdLogLrt; // Q12
+ nShifts = 7 - inst->stages; // WIDTH_PR_MAP_SHIFT - inst->stages + 5;
+ //use larger width in tanh map for pause regions
+ if (tmp32no1 < 0) {
+ tmpIndFX = 0;
+ tmp32no1 = -tmp32no1;
+ //widthPrior = widthPrior * 2.0;
+ nShifts++;
+ }
+ tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14
+ // compute indicator function: sigmoid map
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 14);
+ if ((tableIndex < 16) && (tableIndex >= 0)) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
+ if (tmpIndFX == 0) {
+ tmpIndFX = 8192 - tmp16no2; // Q14
+ } else {
+ tmpIndFX = 8192 + tmp16no2; // Q14
+ }
+ }
+ indPriorFX = WEBRTC_SPL_MUL_16_16(inst->weightLogLrt, tmpIndFX); // 6*Q14
+
+ //spectral flatness feature
+ if (inst->weightSpecFlat) {
+ tmpU32no1 = WEBRTC_SPL_UMUL(inst->featureSpecFlat, 400); // Q10
+ tmpIndFX = 16384; // Q14(1.0)
+ //use larger width in tanh map for pause regions
+ tmpU32no2 = inst->thresholdSpecFlat - tmpU32no1; //Q10
+ nShifts = 4;
+ if (inst->thresholdSpecFlat < tmpU32no1) {
+ tmpIndFX = 0;
+ tmpU32no2 = tmpU32no1 - inst->thresholdSpecFlat;
+ //widthPrior = widthPrior * 2.0;
+ nShifts++;
+ }
+ tmp32no1 = (int32_t)WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2,
+ nShifts), 25);
+ //Q14
+ tmpU32no1 = WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts),
+ 25); //Q14
+ // compute indicator function: sigmoid map
+ // FLOAT code
+ // indicator1 = 0.5 * (tanh(sgnMap * widthPrior *
+ // (threshPrior1 - tmpFloat1)) + 1.0);
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
+ if (tableIndex < 16) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
+ if (tmpIndFX) {
+ tmpIndFX = 8192 + tmp16no2; // Q14
+ } else {
+ tmpIndFX = 8192 - tmp16no2; // Q14
+ }
+ }
+ indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecFlat, tmpIndFX); // 6*Q14
+ }
+
+ //for template spectral-difference
+ if (inst->weightSpecDiff) {
+ tmpU32no1 = 0;
+ if (inst->featureSpecDiff) {
+ normTmp = WEBRTC_SPL_MIN(20 - inst->stages,
+ WebRtcSpl_NormU32(inst->featureSpecDiff));
+ tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(inst->featureSpecDiff, normTmp);
+ // Q(normTmp-2*stages)
+ tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->timeAvgMagnEnergy,
+ 20 - inst->stages - normTmp);
+ if (tmpU32no2 > 0) {
+ // Q(20 - inst->stages)
+ tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2);
+ } else {
+ tmpU32no1 = (uint32_t)(0x7fffffff);
+ }
+ }
+ tmpU32no3 = WEBRTC_SPL_UDIV(WEBRTC_SPL_LSHIFT_U32(inst->thresholdSpecDiff,
+ 17),
+ 25);
+ tmpU32no2 = tmpU32no1 - tmpU32no3;
+ nShifts = 1;
+ tmpIndFX = 16384; // Q14(1.0)
+ //use larger width in tanh map for pause regions
+ if (tmpU32no2 & 0x80000000) {
+ tmpIndFX = 0;
+ tmpU32no2 = tmpU32no3 - tmpU32no1;
+ //widthPrior = widthPrior * 2.0;
+ nShifts--;
+ }
+ tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, nShifts);
+ // compute indicator function: sigmoid map
+ /* FLOAT code
+ indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0);
+ */
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
+ if (tableIndex < 16) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+ tmp16no1, frac, 14);
+ if (tmpIndFX) {
+ tmpIndFX = 8192 + tmp16no2;
+ } else {
+ tmpIndFX = 8192 - tmp16no2;
+ }
+ }
+ indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecDiff, tmpIndFX); // 6*Q14
+ }
+
+ //combine the indicator function with the feature weights
+ // FLOAT code
+ // indPrior = 1 - (weightIndPrior0 * indicator0 + weightIndPrior1 *
+ // indicator1 + weightIndPrior2 * indicator2);
+ indPriorFX16 = WebRtcSpl_DivW32W16ResW16(98307 - indPriorFX, 6); // Q14
+ // done with computing indicator function
+
+ //compute the prior probability
+ // FLOAT code
+ // inst->priorNonSpeechProb += PRIOR_UPDATE *
+ // (indPriorNonSpeech - inst->priorNonSpeechProb);
+ tmp16 = indPriorFX16 - inst->priorNonSpeechProb; // Q14
+ inst->priorNonSpeechProb += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
+ PRIOR_UPDATE_Q14, tmp16, 14); // Q14
+
+ //final speech probability: combine prior model with LR factor:
+
+ memset(nonSpeechProbFinal, 0, sizeof(uint16_t) * inst->magnLen);
+
+ if (inst->priorNonSpeechProb > 0) {
+ for (i = 0; i < inst->magnLen; i++) {
+ // FLOAT code
+ // invLrt = exp(inst->logLrtTimeAvg[i]);
+ // invLrt = inst->priorSpeechProb * invLrt;
+ // nonSpeechProbFinal[i] = (1.0 - inst->priorSpeechProb) /
+ // (1.0 - inst->priorSpeechProb + invLrt);
+ // invLrt = (1.0 - inst->priorNonSpeechProb) * invLrt;
+ // nonSpeechProbFinal[i] = inst->priorNonSpeechProb /
+ // (inst->priorNonSpeechProb + invLrt);
+ if (inst->logLrtTimeAvgW32[i] < 65300) {
+ tmp32no1 = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(
+ inst->logLrtTimeAvgW32[i], 23637),
+ 14); // Q12
+ intPart = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 12);
+ if (intPart < -8) {
+ intPart = -8;
+ }
+ frac = (int16_t)(tmp32no1 & 0x00000fff); // Q12
+
+ // Quadratic approximation of 2^frac
+ tmp32no2 = WEBRTC_SPL_RSHIFT_W32(frac * frac * 44, 19); // Q12
+ tmp32no2 += WEBRTC_SPL_MUL_16_16_RSFT(frac, 84, 7); // Q12
+ invLrtFX = WEBRTC_SPL_LSHIFT_W32(1, 8 + intPart)
+ + WEBRTC_SPL_SHIFT_W32(tmp32no2, intPart - 4); // Q8
+
+ normTmp = WebRtcSpl_NormW32(invLrtFX);
+ normTmp2 = WebRtcSpl_NormW16((16384 - inst->priorNonSpeechProb));
+ if (normTmp + normTmp2 >= 7) {
+ if (normTmp + normTmp2 < 15) {
+ invLrtFX = WEBRTC_SPL_RSHIFT_W32(invLrtFX, 15 - normTmp2 - normTmp);
+ // Q(normTmp+normTmp2-7)
+ tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX,
+ (16384 - inst->priorNonSpeechProb));
+ // Q(normTmp+normTmp2+7)
+ invLrtFX = WEBRTC_SPL_SHIFT_W32(tmp32no1, 7 - normTmp - normTmp2);
+ // Q14
+ } else {
+ tmp32no1 = WEBRTC_SPL_MUL_32_16(invLrtFX,
+ (16384 - inst->priorNonSpeechProb));
+ // Q22
+ invLrtFX = WEBRTC_SPL_RSHIFT_W32(tmp32no1, 8); // Q14
+ }
+
+ tmp32no1 = WEBRTC_SPL_LSHIFT_W32((int32_t)inst->priorNonSpeechProb,
+ 8); // Q22
+
+ nonSpeechProbFinal[i] = (uint16_t)WEBRTC_SPL_DIV(tmp32no1,
+ (int32_t)inst->priorNonSpeechProb + invLrtFX); // Q8
+ }
+ }
+ }
+ }
+}
+
diff --git a/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c
new file mode 100644
index 00000000000..ccb0c376324
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/ns/nsx_core_mips.c
@@ -0,0 +1,1008 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/ns/include/noise_suppression_x.h"
+#include "webrtc/modules/audio_processing/ns/nsx_core.h"
+
+static const int16_t kIndicatorTable[17] = {
+ 0, 2017, 3809, 5227, 6258, 6963, 7424, 7718,
+ 7901, 8014, 8084, 8126, 8152, 8168, 8177, 8183, 8187
+};
+
+// Compute speech/noise probability
+// speech/noise probability is returned in: probSpeechFinal
+// snrLocPrior is the prior SNR for each frequency (in Q11)
+// snrLocPost is the post SNR for each frequency (in Q11)
+void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
+ uint16_t* nonSpeechProbFinal,
+ uint32_t* priorLocSnr,
+ uint32_t* postLocSnr) {
+
+ uint32_t tmpU32no1, tmpU32no2, tmpU32no3;
+ int32_t indPriorFX, tmp32no1;
+ int32_t logLrtTimeAvgKsumFX;
+ int16_t indPriorFX16;
+ int16_t tmp16, tmp16no1, tmp16no2, tmpIndFX, tableIndex, frac;
+ int i, normTmp, nShifts;
+
+ int32_t r0, r1, r2, r3, r4, r5, r6, r7, r8, r9;
+ int32_t const_max = 0x7fffffff;
+ int32_t const_neg43 = -43;
+ int32_t const_5412 = 5412;
+ int32_t const_11rsh12 = (11 << 12);
+ int32_t const_178 = 178;
+
+
+ // compute feature based on average LR factor
+ // this is the average over all frequencies of the smooth log LRT
+ logLrtTimeAvgKsumFX = 0;
+ for (i = 0; i < inst->magnLen; i++) {
+ r0 = postLocSnr[i]; // Q11
+ r1 = priorLocSnr[i];
+ r2 = inst->logLrtTimeAvgW32[i];
+
+ __asm __volatile(
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "clz %[r3], %[r0] \n\t"
+ "clz %[r5], %[r1] \n\t"
+ "slti %[r4], %[r3], 32 \n\t"
+ "slti %[r6], %[r5], 32 \n\t"
+ "movz %[r3], $0, %[r4] \n\t"
+ "movz %[r5], $0, %[r6] \n\t"
+ "slti %[r4], %[r3], 11 \n\t"
+ "addiu %[r6], %[r3], -11 \n\t"
+ "neg %[r7], %[r6] \n\t"
+ "sllv %[r6], %[r1], %[r6] \n\t"
+ "srav %[r7], %[r1], %[r7] \n\t"
+ "movn %[r6], %[r7], %[r4] \n\t"
+ "sllv %[r1], %[r1], %[r5] \n\t"
+ "and %[r1], %[r1], %[const_max] \n\t"
+ "sra %[r1], %[r1], 19 \n\t"
+ "mul %[r7], %[r1], %[r1] \n\t"
+ "sllv %[r3], %[r0], %[r3] \n\t"
+ "divu %[r8], %[r3], %[r6] \n\t"
+ "slti %[r6], %[r6], 1 \n\t"
+ "mul %[r7], %[r7], %[const_neg43] \n\t"
+ "sra %[r7], %[r7], 19 \n\t"
+ "movz %[r3], %[r8], %[r6] \n\t"
+ "subu %[r0], %[r0], %[r3] \n\t"
+ "mul %[r1], %[r1], %[const_5412] \n\t"
+ "sra %[r1], %[r1], 12 \n\t"
+ "addu %[r7], %[r7], %[r1] \n\t"
+ "addiu %[r1], %[r7], 37 \n\t"
+ "addiu %[r5], %[r5], -31 \n\t"
+ "neg %[r5], %[r5] \n\t"
+ "sll %[r5], %[r5], 12 \n\t"
+ "addu %[r5], %[r5], %[r1] \n\t"
+ "subu %[r7], %[r5], %[const_11rsh12] \n\t"
+ "mul %[r7], %[r7], %[const_178] \n\t"
+ "sra %[r7], %[r7], 8 \n\t"
+ "addu %[r7], %[r7], %[r2] \n\t"
+ "sra %[r7], %[r7], 1 \n\t"
+ "subu %[r2], %[r2], %[r7] \n\t"
+ "addu %[r2], %[r2], %[r0] \n\t"
+ ".set pop \n\t"
+ : [r0] "+r" (r0), [r1] "+r" (r1), [r2] "+r" (r2),
+ [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5),
+ [r6] "=&r" (r6), [r7] "=&r" (r7), [r8] "=&r" (r8)
+ : [const_max] "r" (const_max), [const_neg43] "r" (const_neg43),
+ [const_5412] "r" (const_5412), [const_11rsh12] "r" (const_11rsh12),
+ [const_178] "r" (const_178)
+ : "hi", "lo"
+ );
+ inst->logLrtTimeAvgW32[i] = r2;
+ logLrtTimeAvgKsumFX += r2;
+ }
+
+ inst->featureLogLrt = WEBRTC_SPL_RSHIFT_W32(logLrtTimeAvgKsumFX * 5,
+ inst->stages + 10);
+ // 5 = BIN_SIZE_LRT / 2
+ // done with computation of LR factor
+
+ //
+ // compute the indicator functions
+ //
+
+ // average LRT feature
+ // FLOAT code
+ // indicator0 = 0.5 * (tanh(widthPrior *
+ // (logLrtTimeAvgKsum - threshPrior0)) + 1.0);
+ tmpIndFX = 16384; // Q14(1.0)
+ tmp32no1 = logLrtTimeAvgKsumFX - inst->thresholdLogLrt; // Q12
+ nShifts = 7 - inst->stages; // WIDTH_PR_MAP_SHIFT - inst->stages + 5;
+ //use larger width in tanh map for pause regions
+ if (tmp32no1 < 0) {
+ tmpIndFX = 0;
+ tmp32no1 = -tmp32no1;
+ //widthPrior = widthPrior * 2.0;
+ nShifts++;
+ }
+ tmp32no1 = WEBRTC_SPL_SHIFT_W32(tmp32no1, nShifts); // Q14
+ // compute indicator function: sigmoid map
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32no1, 14);
+ if ((tableIndex < 16) && (tableIndex >= 0)) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmp32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
+ if (tmpIndFX == 0) {
+ tmpIndFX = 8192 - tmp16no2; // Q14
+ } else {
+ tmpIndFX = 8192 + tmp16no2; // Q14
+ }
+ }
+ indPriorFX = WEBRTC_SPL_MUL_16_16(inst->weightLogLrt, tmpIndFX); // 6*Q14
+
+ //spectral flatness feature
+ if (inst->weightSpecFlat) {
+ tmpU32no1 = WEBRTC_SPL_UMUL(inst->featureSpecFlat, 400); // Q10
+ tmpIndFX = 16384; // Q14(1.0)
+ //use larger width in tanh map for pause regions
+ tmpU32no2 = inst->thresholdSpecFlat - tmpU32no1; //Q10
+ nShifts = 4;
+ if (inst->thresholdSpecFlat < tmpU32no1) {
+ tmpIndFX = 0;
+ tmpU32no2 = tmpU32no1 - inst->thresholdSpecFlat;
+ //widthPrior = widthPrior * 2.0;
+ nShifts++;
+ }
+ tmp32no1 = (int32_t)WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2,
+ nShifts), 25);
+ //Q14
+ tmpU32no1 = WebRtcSpl_DivU32U16(WEBRTC_SPL_LSHIFT_U32(tmpU32no2, nShifts),
+ 25); //Q14
+ // compute indicator function: sigmoid map
+ // FLOAT code
+ // indicator1 = 0.5 * (tanh(sgnMap * widthPrior *
+ // (threshPrior1 - tmpFloat1)) + 1.0);
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
+ if (tableIndex < 16) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp16no1, frac, 14);
+ if (tmpIndFX) {
+ tmpIndFX = 8192 + tmp16no2; // Q14
+ } else {
+ tmpIndFX = 8192 - tmp16no2; // Q14
+ }
+ }
+ indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecFlat, tmpIndFX); // 6*Q14
+ }
+
+ //for template spectral-difference
+ if (inst->weightSpecDiff) {
+ tmpU32no1 = 0;
+ if (inst->featureSpecDiff) {
+ normTmp = WEBRTC_SPL_MIN(20 - inst->stages,
+ WebRtcSpl_NormU32(inst->featureSpecDiff));
+ tmpU32no1 = WEBRTC_SPL_LSHIFT_U32(inst->featureSpecDiff, normTmp);
+ // Q(normTmp-2*stages)
+ tmpU32no2 = WEBRTC_SPL_RSHIFT_U32(inst->timeAvgMagnEnergy,
+ 20 - inst->stages - normTmp);
+ if (tmpU32no2 > 0) {
+ // Q(20 - inst->stages)
+ tmpU32no1 = WEBRTC_SPL_UDIV(tmpU32no1, tmpU32no2);
+ } else {
+ tmpU32no1 = (uint32_t)(0x7fffffff);
+ }
+ }
+ tmpU32no3 = WEBRTC_SPL_UDIV(WEBRTC_SPL_LSHIFT_U32(inst->thresholdSpecDiff,
+ 17),
+ 25);
+ tmpU32no2 = tmpU32no1 - tmpU32no3;
+ nShifts = 1;
+ tmpIndFX = 16384; // Q14(1.0)
+ //use larger width in tanh map for pause regions
+ if (tmpU32no2 & 0x80000000) {
+ tmpIndFX = 0;
+ tmpU32no2 = tmpU32no3 - tmpU32no1;
+ //widthPrior = widthPrior * 2.0;
+ nShifts--;
+ }
+ tmpU32no1 = WEBRTC_SPL_RSHIFT_U32(tmpU32no2, nShifts);
+ // compute indicator function: sigmoid map
+ /* FLOAT code
+ indicator2 = 0.5 * (tanh(widthPrior * (tmpFloat1 - threshPrior2)) + 1.0);
+ */
+ tableIndex = (int16_t)WEBRTC_SPL_RSHIFT_U32(tmpU32no1, 14);
+ if (tableIndex < 16) {
+ tmp16no2 = kIndicatorTable[tableIndex];
+ tmp16no1 = kIndicatorTable[tableIndex + 1] - kIndicatorTable[tableIndex];
+ frac = (int16_t)(tmpU32no1 & 0x00003fff); // Q14
+ tmp16no2 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+ tmp16no1, frac, 14);
+ if (tmpIndFX) {
+ tmpIndFX = 8192 + tmp16no2;
+ } else {
+ tmpIndFX = 8192 - tmp16no2;
+ }
+ }
+ indPriorFX += WEBRTC_SPL_MUL_16_16(inst->weightSpecDiff, tmpIndFX); // 6*Q14
+ }
+
+ //combine the indicator function with the feature weights
+ // FLOAT code
+ // indPrior = 1 - (weightIndPrior0 * indicator0 + weightIndPrior1 *
+ // indicator1 + weightIndPrior2 * indicator2);
+ indPriorFX16 = WebRtcSpl_DivW32W16ResW16(98307 - indPriorFX, 6); // Q14
+ // done with computing indicator function
+
+ //compute the prior probability
+ // FLOAT code
+ // inst->priorNonSpeechProb += PRIOR_UPDATE *
+ // (indPriorNonSpeech - inst->priorNonSpeechProb);
+ tmp16 = indPriorFX16 - inst->priorNonSpeechProb; // Q14
+ inst->priorNonSpeechProb += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
+ PRIOR_UPDATE_Q14, tmp16, 14); // Q14
+
+ //final speech probability: combine prior model with LR factor:
+
+ memset(nonSpeechProbFinal, 0, sizeof(uint16_t) * inst->magnLen);
+
+ if (inst->priorNonSpeechProb > 0) {
+ r0 = inst->priorNonSpeechProb;
+ r1 = 16384 - r0;
+ int32_t const_23637 = 23637;
+ int32_t const_44 = 44;
+ int32_t const_84 = 84;
+ int32_t const_1 = 1;
+ int32_t const_neg8 = -8;
+ for (i = 0; i < inst->magnLen; i++) {
+ r2 = inst->logLrtTimeAvgW32[i];
+ if (r2 < 65300) {
+ __asm __volatile(
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "mul %[r2], %[r2], %[const_23637] \n\t"
+ "sll %[r6], %[r1], 16 \n\t"
+ "clz %[r7], %[r6] \n\t"
+ "clo %[r8], %[r6] \n\t"
+ "slt %[r9], %[r6], $0 \n\t"
+ "movn %[r7], %[r8], %[r9] \n\t"
+ "sra %[r2], %[r2], 14 \n\t"
+ "andi %[r3], %[r2], 0xfff \n\t"
+ "mul %[r4], %[r3], %[r3] \n\t"
+ "mul %[r3], %[r3], %[const_84] \n\t"
+ "sra %[r2], %[r2], 12 \n\t"
+ "slt %[r5], %[r2], %[const_neg8] \n\t"
+ "movn %[r2], %[const_neg8], %[r5] \n\t"
+ "mul %[r4], %[r4], %[const_44] \n\t"
+ "sra %[r3], %[r3], 7 \n\t"
+ "addiu %[r7], %[r7], -1 \n\t"
+ "slti %[r9], %[r7], 31 \n\t"
+ "movz %[r7], $0, %[r9] \n\t"
+ "sra %[r4], %[r4], 19 \n\t"
+ "addu %[r4], %[r4], %[r3] \n\t"
+ "addiu %[r3], %[r2], 8 \n\t"
+ "addiu %[r2], %[r2], -4 \n\t"
+ "neg %[r5], %[r2] \n\t"
+ "sllv %[r6], %[r4], %[r2] \n\t"
+ "srav %[r5], %[r4], %[r5] \n\t"
+ "slt %[r2], %[r2], $0 \n\t"
+ "movn %[r6], %[r5], %[r2] \n\t"
+ "sllv %[r3], %[const_1], %[r3] \n\t"
+ "addu %[r2], %[r3], %[r6] \n\t"
+ "clz %[r4], %[r2] \n\t"
+ "clo %[r5], %[r2] \n\t"
+ "slt %[r8], %[r2], $0 \n\t"
+ "movn %[r4], %[r5], %[r8] \n\t"
+ "addiu %[r4], %[r4], -1 \n\t"
+ "slt %[r5], $0, %[r2] \n\t"
+ "or %[r5], %[r5], %[r7] \n\t"
+ "movz %[r4], $0, %[r5] \n\t"
+ "addiu %[r6], %[r7], -7 \n\t"
+ "addu %[r6], %[r6], %[r4] \n\t"
+ "bltz %[r6], 1f \n\t"
+ " nop \n\t"
+ "addiu %[r4], %[r6], -8 \n\t"
+ "neg %[r3], %[r4] \n\t"
+ "srav %[r5], %[r2], %[r3] \n\t"
+ "mul %[r5], %[r5], %[r1] \n\t"
+ "mul %[r2], %[r2], %[r1] \n\t"
+ "slt %[r4], %[r4], $0 \n\t"
+ "srav %[r5], %[r5], %[r6] \n\t"
+ "sra %[r2], %[r2], 8 \n\t"
+ "movn %[r2], %[r5], %[r4] \n\t"
+ "sll %[r3], %[r0], 8 \n\t"
+ "addu %[r2], %[r0], %[r2] \n\t"
+ "divu %[r3], %[r3], %[r2] \n\t"
+ "1: \n\t"
+ ".set pop \n\t"
+ : [r2] "+r" (r2), [r3] "=&r" (r3), [r4] "=&r" (r4),
+ [r5] "=&r" (r5), [r6] "=&r" (r6), [r7] "=&r" (r7),
+ [r8] "=&r" (r8), [r9] "=&r" (r9)
+ : [r0] "r" (r0), [r1] "r" (r1), [const_23637] "r" (const_23637),
+ [const_neg8] "r" (const_neg8), [const_84] "r" (const_84),
+ [const_1] "r" (const_1), [const_44] "r" (const_44)
+ : "hi", "lo"
+ );
+ nonSpeechProbFinal[i] = r3;
+ }
+ }
+ }
+}
+
+// Update analysis buffer for lower band, and window data before FFT.
+void WebRtcNsx_AnalysisUpdate_mips(NsxInst_t* inst,
+ int16_t* out,
+ int16_t* new_speech) {
+
+ int iters, after;
+ int anaLen = inst->anaLen;
+ int *window = (int*)inst->window;
+ int *anaBuf = (int*)inst->analysisBuffer;
+ int *outBuf = (int*)out;
+ int r0, r1, r2, r3, r4, r5, r6, r7;
+#if defined(MIPS_DSP_R1_LE)
+ int r8;
+#endif
+
+ // For lower band update analysis buffer.
+ WEBRTC_SPL_MEMCPY_W16(inst->analysisBuffer,
+ inst->analysisBuffer + inst->blockLen10ms,
+ inst->anaLen - inst->blockLen10ms);
+ WEBRTC_SPL_MEMCPY_W16(inst->analysisBuffer
+ + inst->anaLen - inst->blockLen10ms, new_speech, inst->blockLen10ms);
+
+ // Window data before FFT.
+#if defined(MIPS_DSP_R1_LE)
+ __asm __volatile(
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "sra %[iters], %[anaLen], 3 \n\t"
+ "1: \n\t"
+ "blez %[iters], 2f \n\t"
+ " nop \n\t"
+ "lw %[r0], 0(%[window]) \n\t"
+ "lw %[r1], 0(%[anaBuf]) \n\t"
+ "lw %[r2], 4(%[window]) \n\t"
+ "lw %[r3], 4(%[anaBuf]) \n\t"
+ "lw %[r4], 8(%[window]) \n\t"
+ "lw %[r5], 8(%[anaBuf]) \n\t"
+ "lw %[r6], 12(%[window]) \n\t"
+ "lw %[r7], 12(%[anaBuf]) \n\t"
+ "muleq_s.w.phl %[r8], %[r0], %[r1] \n\t"
+ "muleq_s.w.phr %[r0], %[r0], %[r1] \n\t"
+ "muleq_s.w.phl %[r1], %[r2], %[r3] \n\t"
+ "muleq_s.w.phr %[r2], %[r2], %[r3] \n\t"
+ "muleq_s.w.phl %[r3], %[r4], %[r5] \n\t"
+ "muleq_s.w.phr %[r4], %[r4], %[r5] \n\t"
+ "muleq_s.w.phl %[r5], %[r6], %[r7] \n\t"
+ "muleq_s.w.phr %[r6], %[r6], %[r7] \n\t"
+#if defined(MIPS_DSP_R2_LE)
+ "precr_sra_r.ph.w %[r8], %[r0], 15 \n\t"
+ "precr_sra_r.ph.w %[r1], %[r2], 15 \n\t"
+ "precr_sra_r.ph.w %[r3], %[r4], 15 \n\t"
+ "precr_sra_r.ph.w %[r5], %[r6], 15 \n\t"
+ "sw %[r8], 0(%[outBuf]) \n\t"
+ "sw %[r1], 4(%[outBuf]) \n\t"
+ "sw %[r3], 8(%[outBuf]) \n\t"
+ "sw %[r5], 12(%[outBuf]) \n\t"
+#else
+ "shra_r.w %[r8], %[r8], 15 \n\t"
+ "shra_r.w %[r0], %[r0], 15 \n\t"
+ "shra_r.w %[r1], %[r1], 15 \n\t"
+ "shra_r.w %[r2], %[r2], 15 \n\t"
+ "shra_r.w %[r3], %[r3], 15 \n\t"
+ "shra_r.w %[r4], %[r4], 15 \n\t"
+ "shra_r.w %[r5], %[r5], 15 \n\t"
+ "shra_r.w %[r6], %[r6], 15 \n\t"
+ "sll %[r0], %[r0], 16 \n\t"
+ "sll %[r2], %[r2], 16 \n\t"
+ "sll %[r4], %[r4], 16 \n\t"
+ "sll %[r6], %[r6], 16 \n\t"
+ "packrl.ph %[r0], %[r8], %[r0] \n\t"
+ "packrl.ph %[r2], %[r1], %[r2] \n\t"
+ "packrl.ph %[r4], %[r3], %[r4] \n\t"
+ "packrl.ph %[r6], %[r5], %[r6] \n\t"
+ "sw %[r0], 0(%[outBuf]) \n\t"
+ "sw %[r2], 4(%[outBuf]) \n\t"
+ "sw %[r4], 8(%[outBuf]) \n\t"
+ "sw %[r6], 12(%[outBuf]) \n\t"
+#endif
+ "addiu %[window], %[window], 16 \n\t"
+ "addiu %[anaBuf], %[anaBuf], 16 \n\t"
+ "addiu %[outBuf], %[outBuf], 16 \n\t"
+ "b 1b \n\t"
+ " addiu %[iters], %[iters], -1 \n\t"
+ "2: \n\t"
+ "andi %[after], %[anaLen], 7 \n\t"
+ "3: \n\t"
+ "blez %[after], 4f \n\t"
+ " nop \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[anaBuf]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "addiu %[window], %[window], 2 \n\t"
+ "addiu %[anaBuf], %[anaBuf], 2 \n\t"
+ "addiu %[outBuf], %[outBuf], 2 \n\t"
+ "shra_r.w %[r0], %[r0], 14 \n\t"
+ "sh %[r0], -2(%[outBuf]) \n\t"
+ "b 3b \n\t"
+ " addiu %[after], %[after], -1 \n\t"
+ "4: \n\t"
+ ".set pop \n\t"
+ : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2),
+ [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5),
+ [r6] "=&r" (r6), [r7] "=&r" (r7), [r8] "=&r" (r8),
+ [iters] "=&r" (iters), [after] "=&r" (after),
+ [window] "+r" (window),[anaBuf] "+r" (anaBuf),
+ [outBuf] "+r" (outBuf)
+ : [anaLen] "r" (anaLen)
+ : "memory", "hi", "lo"
+ );
+#else
+ __asm __volatile(
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "sra %[iters], %[anaLen], 2 \n\t"
+ "1: \n\t"
+ "blez %[iters], 2f \n\t"
+ " nop \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[anaBuf]) \n\t"
+ "lh %[r2], 2(%[window]) \n\t"
+ "lh %[r3], 2(%[anaBuf]) \n\t"
+ "lh %[r4], 4(%[window]) \n\t"
+ "lh %[r5], 4(%[anaBuf]) \n\t"
+ "lh %[r6], 6(%[window]) \n\t"
+ "lh %[r7], 6(%[anaBuf]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "mul %[r2], %[r2], %[r3] \n\t"
+ "mul %[r4], %[r4], %[r5] \n\t"
+ "mul %[r6], %[r6], %[r7] \n\t"
+ "addiu %[window], %[window], 8 \n\t"
+ "addiu %[anaBuf], %[anaBuf], 8 \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "addiu %[r2], %[r2], 0x2000 \n\t"
+ "addiu %[r4], %[r4], 0x2000 \n\t"
+ "addiu %[r6], %[r6], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "sra %[r2], %[r2], 14 \n\t"
+ "sra %[r4], %[r4], 14 \n\t"
+ "sra %[r6], %[r6], 14 \n\t"
+ "sh %[r0], 0(%[outBuf]) \n\t"
+ "sh %[r2], 2(%[outBuf]) \n\t"
+ "sh %[r4], 4(%[outBuf]) \n\t"
+ "sh %[r6], 6(%[outBuf]) \n\t"
+ "addiu %[outBuf], %[outBuf], 8 \n\t"
+ "b 1b \n\t"
+ " addiu %[iters], %[iters], -1 \n\t"
+ "2: \n\t"
+ "andi %[after], %[anaLen], 3 \n\t"
+ "3: \n\t"
+ "blez %[after], 4f \n\t"
+ " nop \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[anaBuf]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "addiu %[window], %[window], 2 \n\t"
+ "addiu %[anaBuf], %[anaBuf], 2 \n\t"
+ "addiu %[outBuf], %[outBuf], 2 \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "sh %[r0], -2(%[outBuf]) \n\t"
+ "b 3b \n\t"
+ " addiu %[after], %[after], -1 \n\t"
+ "4: \n\t"
+ ".set pop \n\t"
+ : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2),
+ [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5),
+ [r6] "=&r" (r6), [r7] "=&r" (r7), [iters] "=&r" (iters),
+ [after] "=&r" (after), [window] "+r" (window),
+ [anaBuf] "+r" (anaBuf), [outBuf] "+r" (outBuf)
+ : [anaLen] "r" (anaLen)
+ : "memory", "hi", "lo"
+ );
+#endif
+}
+
+// For the noise suppression process, synthesis, read out fully processed
+// segment, and update synthesis buffer.
+void WebRtcNsx_SynthesisUpdate_mips(NsxInst_t* inst,
+ int16_t* out_frame,
+ int16_t gain_factor) {
+
+ int iters = inst->blockLen10ms >> 2;
+ int after = inst->blockLen10ms & 3;
+ int r0, r1, r2, r3, r4, r5, r6, r7;
+ int16_t *window = (int16_t*)inst->window;
+ int16_t *real = inst->real;
+ int16_t *synthBuf = inst->synthesisBuffer;
+ int16_t *out = out_frame;
+ int sat_pos = 0x7fff;
+ int sat_neg = 0xffff8000;
+ int block10 = (int)inst->blockLen10ms;
+ int anaLen = (int)inst->anaLen;
+
+ __asm __volatile(
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "1: \n\t"
+ "blez %[iters], 2f \n\t"
+ " nop \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[real]) \n\t"
+ "lh %[r2], 2(%[window]) \n\t"
+ "lh %[r3], 2(%[real]) \n\t"
+ "lh %[r4], 4(%[window]) \n\t"
+ "lh %[r5], 4(%[real]) \n\t"
+ "lh %[r6], 6(%[window]) \n\t"
+ "lh %[r7], 6(%[real]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "mul %[r2], %[r2], %[r3] \n\t"
+ "mul %[r4], %[r4], %[r5] \n\t"
+ "mul %[r6], %[r6], %[r7] \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "addiu %[r2], %[r2], 0x2000 \n\t"
+ "addiu %[r4], %[r4], 0x2000 \n\t"
+ "addiu %[r6], %[r6], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "sra %[r2], %[r2], 14 \n\t"
+ "sra %[r4], %[r4], 14 \n\t"
+ "sra %[r6], %[r6], 14 \n\t"
+ "mul %[r0], %[r0], %[gain_factor] \n\t"
+ "mul %[r2], %[r2], %[gain_factor] \n\t"
+ "mul %[r4], %[r4], %[gain_factor] \n\t"
+ "mul %[r6], %[r6], %[gain_factor] \n\t"
+ "addiu %[r0], %[r0], 0x1000 \n\t"
+ "addiu %[r2], %[r2], 0x1000 \n\t"
+ "addiu %[r4], %[r4], 0x1000 \n\t"
+ "addiu %[r6], %[r6], 0x1000 \n\t"
+ "sra %[r0], %[r0], 13 \n\t"
+ "sra %[r2], %[r2], 13 \n\t"
+ "sra %[r4], %[r4], 13 \n\t"
+ "sra %[r6], %[r6], 13 \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "slt %[r3], %[r2], %[sat_pos] \n\t"
+ "slt %[r5], %[r4], %[sat_pos] \n\t"
+ "slt %[r7], %[r6], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "movz %[r2], %[sat_pos], %[r3] \n\t"
+ "movz %[r4], %[sat_pos], %[r5] \n\t"
+ "movz %[r6], %[sat_pos], %[r7] \n\t"
+ "lh %[r1], 0(%[synthBuf]) \n\t"
+ "lh %[r3], 2(%[synthBuf]) \n\t"
+ "lh %[r5], 4(%[synthBuf]) \n\t"
+ "lh %[r7], 6(%[synthBuf]) \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+ "addu %[r2], %[r2], %[r3] \n\t"
+ "addu %[r4], %[r4], %[r5] \n\t"
+ "addu %[r6], %[r6], %[r7] \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "slt %[r3], %[r2], %[sat_pos] \n\t"
+ "slt %[r5], %[r4], %[sat_pos] \n\t"
+ "slt %[r7], %[r6], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "movz %[r2], %[sat_pos], %[r3] \n\t"
+ "movz %[r4], %[sat_pos], %[r5] \n\t"
+ "movz %[r6], %[sat_pos], %[r7] \n\t"
+ "slt %[r1], %[r0], %[sat_neg] \n\t"
+ "slt %[r3], %[r2], %[sat_neg] \n\t"
+ "slt %[r5], %[r4], %[sat_neg] \n\t"
+ "slt %[r7], %[r6], %[sat_neg] \n\t"
+ "movn %[r0], %[sat_neg], %[r1] \n\t"
+ "movn %[r2], %[sat_neg], %[r3] \n\t"
+ "movn %[r4], %[sat_neg], %[r5] \n\t"
+ "movn %[r6], %[sat_neg], %[r7] \n\t"
+ "sh %[r0], 0(%[synthBuf]) \n\t"
+ "sh %[r2], 2(%[synthBuf]) \n\t"
+ "sh %[r4], 4(%[synthBuf]) \n\t"
+ "sh %[r6], 6(%[synthBuf]) \n\t"
+ "sh %[r0], 0(%[out]) \n\t"
+ "sh %[r2], 2(%[out]) \n\t"
+ "sh %[r4], 4(%[out]) \n\t"
+ "sh %[r6], 6(%[out]) \n\t"
+ "addiu %[window], %[window], 8 \n\t"
+ "addiu %[real], %[real], 8 \n\t"
+ "addiu %[synthBuf],%[synthBuf], 8 \n\t"
+ "addiu %[out], %[out], 8 \n\t"
+ "b 1b \n\t"
+ " addiu %[iters], %[iters], -1 \n\t"
+ "2: \n\t"
+ "blez %[after], 3f \n\t"
+ " subu %[block10], %[anaLen], %[block10] \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[real]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "addiu %[window], %[window], 2 \n\t"
+ "addiu %[real], %[real], 2 \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "mul %[r0], %[r0], %[gain_factor] \n\t"
+ "addiu %[r0], %[r0], 0x1000 \n\t"
+ "sra %[r0], %[r0], 13 \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "lh %[r1], 0(%[synthBuf]) \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "slt %[r1], %[r0], %[sat_neg] \n\t"
+ "movn %[r0], %[sat_neg], %[r1] \n\t"
+ "sh %[r0], 0(%[synthBuf]) \n\t"
+ "sh %[r0], 0(%[out]) \n\t"
+ "addiu %[synthBuf],%[synthBuf], 2 \n\t"
+ "addiu %[out], %[out], 2 \n\t"
+ "b 2b \n\t"
+ " addiu %[after], %[after], -1 \n\t"
+ "3: \n\t"
+ "sra %[iters], %[block10], 2 \n\t"
+ "4: \n\t"
+ "blez %[iters], 5f \n\t"
+ " andi %[after], %[block10], 3 \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[real]) \n\t"
+ "lh %[r2], 2(%[window]) \n\t"
+ "lh %[r3], 2(%[real]) \n\t"
+ "lh %[r4], 4(%[window]) \n\t"
+ "lh %[r5], 4(%[real]) \n\t"
+ "lh %[r6], 6(%[window]) \n\t"
+ "lh %[r7], 6(%[real]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "mul %[r2], %[r2], %[r3] \n\t"
+ "mul %[r4], %[r4], %[r5] \n\t"
+ "mul %[r6], %[r6], %[r7] \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "addiu %[r2], %[r2], 0x2000 \n\t"
+ "addiu %[r4], %[r4], 0x2000 \n\t"
+ "addiu %[r6], %[r6], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "sra %[r2], %[r2], 14 \n\t"
+ "sra %[r4], %[r4], 14 \n\t"
+ "sra %[r6], %[r6], 14 \n\t"
+ "mul %[r0], %[r0], %[gain_factor] \n\t"
+ "mul %[r2], %[r2], %[gain_factor] \n\t"
+ "mul %[r4], %[r4], %[gain_factor] \n\t"
+ "mul %[r6], %[r6], %[gain_factor] \n\t"
+ "addiu %[r0], %[r0], 0x1000 \n\t"
+ "addiu %[r2], %[r2], 0x1000 \n\t"
+ "addiu %[r4], %[r4], 0x1000 \n\t"
+ "addiu %[r6], %[r6], 0x1000 \n\t"
+ "sra %[r0], %[r0], 13 \n\t"
+ "sra %[r2], %[r2], 13 \n\t"
+ "sra %[r4], %[r4], 13 \n\t"
+ "sra %[r6], %[r6], 13 \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "slt %[r3], %[r2], %[sat_pos] \n\t"
+ "slt %[r5], %[r4], %[sat_pos] \n\t"
+ "slt %[r7], %[r6], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "movz %[r2], %[sat_pos], %[r3] \n\t"
+ "movz %[r4], %[sat_pos], %[r5] \n\t"
+ "movz %[r6], %[sat_pos], %[r7] \n\t"
+ "lh %[r1], 0(%[synthBuf]) \n\t"
+ "lh %[r3], 2(%[synthBuf]) \n\t"
+ "lh %[r5], 4(%[synthBuf]) \n\t"
+ "lh %[r7], 6(%[synthBuf]) \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+ "addu %[r2], %[r2], %[r3] \n\t"
+ "addu %[r4], %[r4], %[r5] \n\t"
+ "addu %[r6], %[r6], %[r7] \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "slt %[r3], %[r2], %[sat_pos] \n\t"
+ "slt %[r5], %[r4], %[sat_pos] \n\t"
+ "slt %[r7], %[r6], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "movz %[r2], %[sat_pos], %[r3] \n\t"
+ "movz %[r4], %[sat_pos], %[r5] \n\t"
+ "movz %[r6], %[sat_pos], %[r7] \n\t"
+ "slt %[r1], %[r0], %[sat_neg] \n\t"
+ "slt %[r3], %[r2], %[sat_neg] \n\t"
+ "slt %[r5], %[r4], %[sat_neg] \n\t"
+ "slt %[r7], %[r6], %[sat_neg] \n\t"
+ "movn %[r0], %[sat_neg], %[r1] \n\t"
+ "movn %[r2], %[sat_neg], %[r3] \n\t"
+ "movn %[r4], %[sat_neg], %[r5] \n\t"
+ "movn %[r6], %[sat_neg], %[r7] \n\t"
+ "sh %[r0], 0(%[synthBuf]) \n\t"
+ "sh %[r2], 2(%[synthBuf]) \n\t"
+ "sh %[r4], 4(%[synthBuf]) \n\t"
+ "sh %[r6], 6(%[synthBuf]) \n\t"
+ "addiu %[window], %[window], 8 \n\t"
+ "addiu %[real], %[real], 8 \n\t"
+ "addiu %[synthBuf],%[synthBuf], 8 \n\t"
+ "b 4b \n\t"
+ " addiu %[iters], %[iters], -1 \n\t"
+ "5: \n\t"
+ "blez %[after], 6f \n\t"
+ " nop \n\t"
+ "lh %[r0], 0(%[window]) \n\t"
+ "lh %[r1], 0(%[real]) \n\t"
+ "mul %[r0], %[r0], %[r1] \n\t"
+ "addiu %[window], %[window], 2 \n\t"
+ "addiu %[real], %[real], 2 \n\t"
+ "addiu %[r0], %[r0], 0x2000 \n\t"
+ "sra %[r0], %[r0], 14 \n\t"
+ "mul %[r0], %[r0], %[gain_factor] \n\t"
+ "addiu %[r0], %[r0], 0x1000 \n\t"
+ "sra %[r0], %[r0], 13 \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "lh %[r1], 0(%[synthBuf]) \n\t"
+ "addu %[r0], %[r0], %[r1] \n\t"
+ "slt %[r1], %[r0], %[sat_pos] \n\t"
+ "movz %[r0], %[sat_pos], %[r1] \n\t"
+ "slt %[r1], %[r0], %[sat_neg] \n\t"
+ "movn %[r0], %[sat_neg], %[r1] \n\t"
+ "sh %[r0], 0(%[synthBuf]) \n\t"
+ "addiu %[synthBuf],%[synthBuf], 2 \n\t"
+ "b 2b \n\t"
+ " addiu %[after], %[after], -1 \n\t"
+ "6: \n\t"
+ ".set pop \n\t"
+ : [r0] "=&r" (r0), [r1] "=&r" (r1), [r2] "=&r" (r2),
+ [r3] "=&r" (r3), [r4] "=&r" (r4), [r5] "=&r" (r5),
+ [r6] "=&r" (r6), [r7] "=&r" (r7), [iters] "+r" (iters),
+ [after] "+r" (after), [block10] "+r" (block10),
+ [window] "+r" (window), [real] "+r" (real),
+ [synthBuf] "+r" (synthBuf), [out] "+r" (out)
+ : [gain_factor] "r" (gain_factor), [sat_pos] "r" (sat_pos),
+ [sat_neg] "r" (sat_neg), [anaLen] "r" (anaLen)
+ : "memory", "hi", "lo"
+ );
+
+ // update synthesis buffer
+ WEBRTC_SPL_MEMCPY_W16(inst->synthesisBuffer,
+ inst->synthesisBuffer + inst->blockLen10ms,
+ inst->anaLen - inst->blockLen10ms);
+ WebRtcSpl_ZerosArrayW16(inst->synthesisBuffer
+ + inst->anaLen - inst->blockLen10ms, inst->blockLen10ms);
+}
+
+// Filter the data in the frequency domain, and create spectrum.
+void WebRtcNsx_PrepareSpectrum_mips(NsxInst_t* inst, int16_t* freq_buf) {
+
+ uint16_t *noiseSupFilter = inst->noiseSupFilter;
+ int16_t *real = inst->real;
+ int16_t *imag = inst->imag;
+ int32_t loop_count = 2;
+ int16_t tmp_1, tmp_2, tmp_3, tmp_4, tmp_5, tmp_6;
+ int16_t tmp16 = (inst->anaLen << 1) - 4;
+ int16_t* freq_buf_f = freq_buf;
+ int16_t* freq_buf_s = &freq_buf[tmp16];
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ //first sample
+ "lh %[tmp_1], 0(%[noiseSupFilter]) \n\t"
+ "lh %[tmp_2], 0(%[real]) \n\t"
+ "lh %[tmp_3], 0(%[imag]) \n\t"
+ "mul %[tmp_2], %[tmp_2], %[tmp_1] \n\t"
+ "mul %[tmp_3], %[tmp_3], %[tmp_1] \n\t"
+ "sra %[tmp_2], %[tmp_2], 14 \n\t"
+ "sra %[tmp_3], %[tmp_3], 14 \n\t"
+ "sh %[tmp_2], 0(%[real]) \n\t"
+ "sh %[tmp_3], 0(%[imag]) \n\t"
+ "negu %[tmp_3], %[tmp_3] \n\t"
+ "sh %[tmp_2], 0(%[freq_buf_f]) \n\t"
+ "sh %[tmp_3], 2(%[freq_buf_f]) \n\t"
+ "addiu %[real], %[real], 2 \n\t"
+ "addiu %[imag], %[imag], 2 \n\t"
+ "addiu %[noiseSupFilter], %[noiseSupFilter], 2 \n\t"
+ "addiu %[freq_buf_f], %[freq_buf_f], 4 \n\t"
+ "1: \n\t"
+ "lh %[tmp_1], 0(%[noiseSupFilter]) \n\t"
+ "lh %[tmp_2], 0(%[real]) \n\t"
+ "lh %[tmp_3], 0(%[imag]) \n\t"
+ "lh %[tmp_4], 2(%[noiseSupFilter]) \n\t"
+ "lh %[tmp_5], 2(%[real]) \n\t"
+ "lh %[tmp_6], 2(%[imag]) \n\t"
+ "mul %[tmp_2], %[tmp_2], %[tmp_1] \n\t"
+ "mul %[tmp_3], %[tmp_3], %[tmp_1] \n\t"
+ "mul %[tmp_5], %[tmp_5], %[tmp_4] \n\t"
+ "mul %[tmp_6], %[tmp_6], %[tmp_4] \n\t"
+ "addiu %[loop_count], %[loop_count], 2 \n\t"
+ "sra %[tmp_2], %[tmp_2], 14 \n\t"
+ "sra %[tmp_3], %[tmp_3], 14 \n\t"
+ "sra %[tmp_5], %[tmp_5], 14 \n\t"
+ "sra %[tmp_6], %[tmp_6], 14 \n\t"
+ "addiu %[noiseSupFilter], %[noiseSupFilter], 4 \n\t"
+ "sh %[tmp_2], 0(%[real]) \n\t"
+ "sh %[tmp_2], 4(%[freq_buf_s]) \n\t"
+ "sh %[tmp_3], 0(%[imag]) \n\t"
+ "sh %[tmp_3], 6(%[freq_buf_s]) \n\t"
+ "negu %[tmp_3], %[tmp_3] \n\t"
+ "sh %[tmp_5], 2(%[real]) \n\t"
+ "sh %[tmp_5], 0(%[freq_buf_s]) \n\t"
+ "sh %[tmp_6], 2(%[imag]) \n\t"
+ "sh %[tmp_6], 2(%[freq_buf_s]) \n\t"
+ "negu %[tmp_6], %[tmp_6] \n\t"
+ "addiu %[freq_buf_s], %[freq_buf_s], -8 \n\t"
+ "addiu %[real], %[real], 4 \n\t"
+ "addiu %[imag], %[imag], 4 \n\t"
+ "sh %[tmp_2], 0(%[freq_buf_f]) \n\t"
+ "sh %[tmp_3], 2(%[freq_buf_f]) \n\t"
+ "sh %[tmp_5], 4(%[freq_buf_f]) \n\t"
+ "sh %[tmp_6], 6(%[freq_buf_f]) \n\t"
+ "blt %[loop_count], %[loop_size], 1b \n\t"
+ " addiu %[freq_buf_f], %[freq_buf_f], 8 \n\t"
+ //last two samples:
+ "lh %[tmp_1], 0(%[noiseSupFilter]) \n\t"
+ "lh %[tmp_2], 0(%[real]) \n\t"
+ "lh %[tmp_3], 0(%[imag]) \n\t"
+ "lh %[tmp_4], 2(%[noiseSupFilter]) \n\t"
+ "lh %[tmp_5], 2(%[real]) \n\t"
+ "lh %[tmp_6], 2(%[imag]) \n\t"
+ "mul %[tmp_2], %[tmp_2], %[tmp_1] \n\t"
+ "mul %[tmp_3], %[tmp_3], %[tmp_1] \n\t"
+ "mul %[tmp_5], %[tmp_5], %[tmp_4] \n\t"
+ "mul %[tmp_6], %[tmp_6], %[tmp_4] \n\t"
+ "sra %[tmp_2], %[tmp_2], 14 \n\t"
+ "sra %[tmp_3], %[tmp_3], 14 \n\t"
+ "sra %[tmp_5], %[tmp_5], 14 \n\t"
+ "sra %[tmp_6], %[tmp_6], 14 \n\t"
+ "sh %[tmp_2], 0(%[real]) \n\t"
+ "sh %[tmp_2], 4(%[freq_buf_s]) \n\t"
+ "sh %[tmp_3], 0(%[imag]) \n\t"
+ "sh %[tmp_3], 6(%[freq_buf_s]) \n\t"
+ "negu %[tmp_3], %[tmp_3] \n\t"
+ "sh %[tmp_2], 0(%[freq_buf_f]) \n\t"
+ "sh %[tmp_3], 2(%[freq_buf_f]) \n\t"
+ "sh %[tmp_5], 4(%[freq_buf_f]) \n\t"
+ "sh %[tmp_6], 6(%[freq_buf_f]) \n\t"
+ "sh %[tmp_5], 2(%[real]) \n\t"
+ "sh %[tmp_6], 2(%[imag]) \n\t"
+ ".set pop \n\t"
+ : [real] "+r" (real), [imag] "+r" (imag),
+ [freq_buf_f] "+r" (freq_buf_f), [freq_buf_s] "+r" (freq_buf_s),
+ [loop_count] "+r" (loop_count), [noiseSupFilter] "+r" (noiseSupFilter),
+ [tmp_1] "=&r" (tmp_1), [tmp_2] "=&r" (tmp_2), [tmp_3] "=&r" (tmp_3),
+ [tmp_4] "=&r" (tmp_4), [tmp_5] "=&r" (tmp_5), [tmp_6] "=&r" (tmp_6)
+ : [loop_size] "r" (inst->anaLen2)
+ : "memory", "hi", "lo"
+ );
+}
+
+#if defined(MIPS_DSP_R1_LE)
+// Denormalize the real-valued signal |in|, the output from inverse FFT.
+void WebRtcNsx_Denormalize_mips(NsxInst_t* inst, int16_t* in, int factor) {
+ int32_t r0, r1, r2, r3, t0;
+ int len = inst->anaLen;
+ int16_t *out = &inst->real[0];
+ int shift = factor - inst->normData;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "beqz %[len], 8f \n\t"
+ " nop \n\t"
+ "bltz %[shift], 4f \n\t"
+ " sra %[t0], %[len], 2 \n\t"
+ "beqz %[t0], 2f \n\t"
+ " andi %[len], %[len], 3 \n\t"
+ "1: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 2(%[in]) \n\t"
+ "lh %[r2], 4(%[in]) \n\t"
+ "lh %[r3], 6(%[in]) \n\t"
+ "shllv_s.ph %[r0], %[r0], %[shift] \n\t"
+ "shllv_s.ph %[r1], %[r1], %[shift] \n\t"
+ "shllv_s.ph %[r2], %[r2], %[shift] \n\t"
+ "shllv_s.ph %[r3], %[r3], %[shift] \n\t"
+ "addiu %[in], %[in], 8 \n\t"
+ "addiu %[t0], %[t0], -1 \n\t"
+ "sh %[r0], 0(%[out]) \n\t"
+ "sh %[r1], 2(%[out]) \n\t"
+ "sh %[r2], 4(%[out]) \n\t"
+ "sh %[r3], 6(%[out]) \n\t"
+ "bgtz %[t0], 1b \n\t"
+ " addiu %[out], %[out], 8 \n\t"
+ "2: \n\t"
+ "beqz %[len], 8f \n\t"
+ " nop \n\t"
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "shllv_s.ph %[r0], %[r0], %[shift] \n\t"
+ "addiu %[out], %[out], 2 \n\t"
+ "bgtz %[len], 3b \n\t"
+ " sh %[r0], -2(%[out]) \n\t"
+ "b 8f \n\t"
+ "4: \n\t"
+ "negu %[shift], %[shift] \n\t"
+ "beqz %[t0], 6f \n\t"
+ " andi %[len], %[len], 3 \n\t"
+ "5: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 2(%[in]) \n\t"
+ "lh %[r2], 4(%[in]) \n\t"
+ "lh %[r3], 6(%[in]) \n\t"
+ "srav %[r0], %[r0], %[shift] \n\t"
+ "srav %[r1], %[r1], %[shift] \n\t"
+ "srav %[r2], %[r2], %[shift] \n\t"
+ "srav %[r3], %[r3], %[shift] \n\t"
+ "addiu %[in], %[in], 8 \n\t"
+ "addiu %[t0], %[t0], -1 \n\t"
+ "sh %[r0], 0(%[out]) \n\t"
+ "sh %[r1], 2(%[out]) \n\t"
+ "sh %[r2], 4(%[out]) \n\t"
+ "sh %[r3], 6(%[out]) \n\t"
+ "bgtz %[t0], 5b \n\t"
+ " addiu %[out], %[out], 8 \n\t"
+ "6: \n\t"
+ "beqz %[len], 8f \n\t"
+ " nop \n\t"
+ "7: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "srav %[r0], %[r0], %[shift] \n\t"
+ "addiu %[out], %[out], 2 \n\t"
+ "bgtz %[len], 7b \n\t"
+ " sh %[r0], -2(%[out]) \n\t"
+ "8: \n\t"
+ ".set pop \n\t"
+ : [t0] "=&r" (t0), [r0] "=&r" (r0), [r1] "=&r" (r1),
+ [r2] "=&r" (r2), [r3] "=&r" (r3)
+ : [len] "r" (len), [shift] "r" (shift), [in] "r" (in),
+ [out] "r" (out)
+ : "memory"
+ );
+}
+#endif
+
+// Normalize the real-valued signal |in|, the input to forward FFT.
+void WebRtcNsx_NormalizeRealBuffer_mips(NsxInst_t* inst,
+ const int16_t* in,
+ int16_t* out) {
+ int32_t r0, r1, r2, r3, t0;
+ int len = inst->anaLen;
+ int shift = inst->normData;
+
+ __asm __volatile (
+ ".set push \n\t"
+ ".set noreorder \n\t"
+ "beqz %[len], 4f \n\t"
+ " sra %[t0], %[len], 2 \n\t"
+ "beqz %[t0], 2f \n\t"
+ " andi %[len], %[len], 3 \n\t"
+ "1: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "lh %[r1], 2(%[in]) \n\t"
+ "lh %[r2], 4(%[in]) \n\t"
+ "lh %[r3], 6(%[in]) \n\t"
+ "sllv %[r0], %[r0], %[shift] \n\t"
+ "sllv %[r1], %[r1], %[shift] \n\t"
+ "sllv %[r2], %[r2], %[shift] \n\t"
+ "sllv %[r3], %[r3], %[shift] \n\t"
+ "addiu %[in], %[in], 8 \n\t"
+ "addiu %[t0], %[t0], -1 \n\t"
+ "sh %[r0], 0(%[out]) \n\t"
+ "sh %[r1], 2(%[out]) \n\t"
+ "sh %[r2], 4(%[out]) \n\t"
+ "sh %[r3], 6(%[out]) \n\t"
+ "bgtz %[t0], 1b \n\t"
+ " addiu %[out], %[out], 8 \n\t"
+ "2: \n\t"
+ "beqz %[len], 4f \n\t"
+ " nop \n\t"
+ "3: \n\t"
+ "lh %[r0], 0(%[in]) \n\t"
+ "addiu %[in], %[in], 2 \n\t"
+ "addiu %[len], %[len], -1 \n\t"
+ "sllv %[r0], %[r0], %[shift] \n\t"
+ "addiu %[out], %[out], 2 \n\t"
+ "bgtz %[len], 3b \n\t"
+ " sh %[r0], -2(%[out]) \n\t"
+ "4: \n\t"
+ ".set pop \n\t"
+ : [t0] "=&r" (t0), [r0] "=&r" (r0), [r1] "=&r" (r1),
+ [r2] "=&r" (r2), [r3] "=&r" (r3)
+ : [len] "r" (len), [shift] "r" (shift), [in] "r" (in),
+ [out] "r" (out)
+ : "memory"
+ );
+}
+
diff --git a/chromium/third_party/webrtc/modules/audio_processing/processing_component.cc b/chromium/third_party/webrtc/modules/audio_processing/processing_component.cc
index 23bf22570b1..9e16d7c4eea 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/processing_component.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/processing_component.cc
@@ -12,15 +12,12 @@
#include <assert.h>
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
namespace webrtc {
-ProcessingComponent::ProcessingComponent() {}
-
-ProcessingComponent::ProcessingComponent(const AudioProcessingImpl* apm)
- : apm_(apm),
- initialized_(false),
+ProcessingComponent::ProcessingComponent()
+ : initialized_(false),
enabled_(false),
num_handles_(0) {}
@@ -35,7 +32,7 @@ int ProcessingComponent::Destroy() {
}
initialized_ = false;
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
int ProcessingComponent::EnableComponent(bool enable) {
@@ -43,7 +40,7 @@ int ProcessingComponent::EnableComponent(bool enable) {
enabled_ = enable; // Must be set before Initialize() is called.
int err = Initialize();
- if (err != apm_->kNoError) {
+ if (err != AudioProcessing::kNoError) {
enabled_ = false;
return err;
}
@@ -51,7 +48,7 @@ int ProcessingComponent::EnableComponent(bool enable) {
enabled_ = enable;
}
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
bool ProcessingComponent::is_component_enabled() const {
@@ -69,7 +66,7 @@ int ProcessingComponent::num_handles() const {
int ProcessingComponent::Initialize() {
if (!enabled_) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
num_handles_ = num_handles_required();
@@ -82,12 +79,12 @@ int ProcessingComponent::Initialize() {
if (handles_[i] == NULL) {
handles_[i] = CreateHandle();
if (handles_[i] == NULL) {
- return apm_->kCreationFailedError;
+ return AudioProcessing::kCreationFailedError;
}
}
int err = InitializeHandle(handles_[i]);
- if (err != apm_->kNoError) {
+ if (err != AudioProcessing::kNoError) {
return GetHandleError(handles_[i]);
}
}
@@ -98,17 +95,17 @@ int ProcessingComponent::Initialize() {
int ProcessingComponent::Configure() {
if (!initialized_) {
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
assert(static_cast<int>(handles_.size()) >= num_handles_);
for (int i = 0; i < num_handles_; i++) {
int err = ConfigureHandle(handles_[i]);
- if (err != apm_->kNoError) {
+ if (err != AudioProcessing::kNoError) {
return GetHandleError(handles_[i]);
}
}
- return apm_->kNoError;
+ return AudioProcessing::kNoError;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/processing_component.h b/chromium/third_party/webrtc/modules/audio_processing/processing_component.h
index c090d222456..8ee3ac6c7db 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/processing_component.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/processing_component.h
@@ -13,16 +13,13 @@
#include <vector>
-#include "webrtc/modules/audio_processing/include/audio_processing.h"
+#include "webrtc/common.h"
namespace webrtc {
-class AudioProcessingImpl;
-
class ProcessingComponent {
public:
ProcessingComponent();
- explicit ProcessingComponent(const AudioProcessingImpl* apm);
virtual ~ProcessingComponent();
virtual int Initialize();
@@ -41,11 +38,10 @@ class ProcessingComponent {
virtual void* CreateHandle() const = 0;
virtual int InitializeHandle(void* handle) const = 0;
virtual int ConfigureHandle(void* handle) const = 0;
- virtual int DestroyHandle(void* handle) const = 0;
+ virtual void DestroyHandle(void* handle) const = 0;
virtual int num_handles_required() const = 0;
virtual int GetHandleError(void* handle) const = 0;
- const AudioProcessingImpl* apm_;
std::vector<void*> handles_;
bool initialized_;
bool enabled_;
diff --git a/chromium/third_party/webrtc/modules/audio_processing/rms_level.cc b/chromium/third_party/webrtc/modules/audio_processing/rms_level.cc
new file mode 100644
index 00000000000..14136bf3049
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/rms_level.cc
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/rms_level.h"
+
+#include <assert.h>
+#include <math.h>
+
+namespace webrtc {
+
+static const float kMaxSquaredLevel = 32768 * 32768;
+
+RMSLevel::RMSLevel()
+ : sum_square_(0),
+ sample_count_(0) {}
+
+RMSLevel::~RMSLevel() {}
+
+void RMSLevel::Reset() {
+ sum_square_ = 0;
+ sample_count_ = 0;
+}
+
+void RMSLevel::Process(const int16_t* data, int length) {
+ for (int i = 0; i < length; ++i) {
+ sum_square_ += data[i] * data[i];
+ }
+ sample_count_ += length;
+}
+
+void RMSLevel::ProcessMuted(int length) {
+ sample_count_ += length;
+}
+
+int RMSLevel::RMS() {
+ if (sample_count_ == 0 || sum_square_ == 0) {
+ Reset();
+ return kMinLevel;
+ }
+
+ // Normalize by the max level.
+ float rms = sum_square_ / (sample_count_ * kMaxSquaredLevel);
+ // 20log_10(x^0.5) = 10log_10(x)
+ rms = 10 * log10(rms);
+ assert(rms <= 0);
+ if (rms < -kMinLevel)
+ rms = -kMinLevel;
+
+ rms = -rms;
+ Reset();
+ return static_cast<int>(rms + 0.5);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/rms_level.h b/chromium/third_party/webrtc/modules/audio_processing/rms_level.h
new file mode 100644
index 00000000000..055d271bb19
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/rms_level.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_RMS_LEVEL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_RMS_LEVEL_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// Computes the root mean square (RMS) level in dBFs (decibels from digital
+// full-scale) of audio data. The computation follows RFC 6465:
+// https://tools.ietf.org/html/rfc6465
+// with the intent that it can provide the RTP audio level indication.
+//
+// The expected approach is to provide constant-sized chunks of audio to
+// Process(). When enough chunks have been accumulated to form a packet, call
+// RMS() to get the audio level indicator for the RTP header.
+class RMSLevel {
+ public:
+ static const int kMinLevel = 127;
+
+ RMSLevel();
+ ~RMSLevel();
+
+ // Can be called to reset internal states, but is not required during normal
+ // operation.
+ void Reset();
+
+ // Pass each chunk of audio to Process() to accumulate the level.
+ void Process(const int16_t* data, int length);
+
+ // If all samples with the given |length| have a magnitude of zero, this is
+ // a shortcut to avoid some computation.
+ void ProcessMuted(int length);
+
+ // Computes the RMS level over all data passed to Process() since the last
+ // call to RMS(). The returned value is positive but should be interpreted as
+ // negative as per the RFC. It is constrained to [0, 127].
+ int RMS();
+
+ private:
+ float sum_square_;
+ int sample_count_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_PROCESSING_RMS_LEVEL_H_
+
diff --git a/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.cc b/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.cc
deleted file mode 100644
index 372c8dc426e..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/modules/audio_processing/splitting_filter.h"
-
-namespace webrtc {
-
-void SplittingFilterAnalysis(const int16_t* in_data,
- int16_t* low_band,
- int16_t* high_band,
- int32_t* filter_state1,
- int32_t* filter_state2)
-{
- WebRtcSpl_AnalysisQMF(in_data, low_band, high_band, filter_state1, filter_state2);
-}
-
-void SplittingFilterSynthesis(const int16_t* low_band,
- const int16_t* high_band,
- int16_t* out_data,
- int32_t* filt_state1,
- int32_t* filt_state2)
-{
- WebRtcSpl_SynthesisQMF(low_band, high_band, out_data, filt_state1, filt_state2);
-}
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.h b/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.h
deleted file mode 100644
index b6c851273a6..00000000000
--- a/chromium/third_party/webrtc/modules/audio_processing/splitting_filter.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_
-
-#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-/*
- * SplittingFilterbank_analysisQMF(...)
- *
- * Splits a super-wb signal into two subbands: 0-8 kHz and 8-16 kHz.
- *
- * Input:
- * - in_data : super-wb audio signal
- *
- * Input & Output:
- * - filt_state1: Filter state for first all-pass filter
- * - filt_state2: Filter state for second all-pass filter
- *
- * Output:
- * - low_band : The signal from the 0-4 kHz band
- * - high_band : The signal from the 4-8 kHz band
- */
-void SplittingFilterAnalysis(const int16_t* in_data,
- int16_t* low_band,
- int16_t* high_band,
- int32_t* filt_state1,
- int32_t* filt_state2);
-
-/*
- * SplittingFilterbank_synthesisQMF(...)
- *
- * Combines the two subbands (0-8 and 8-16 kHz) into a super-wb signal.
- *
- * Input:
- * - low_band : The signal with the 0-8 kHz band
- * - high_band : The signal with the 8-16 kHz band
- *
- * Input & Output:
- * - filt_state1: Filter state for first all-pass filter
- * - filt_state2: Filter state for second all-pass filter
- *
- * Output:
- * - out_data : super-wb speech signal
- */
-void SplittingFilterSynthesis(const int16_t* low_band,
- const int16_t* high_band,
- int16_t* out_data,
- int32_t* filt_state1,
- int32_t* filt_state2);
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_SPLITTING_FILTER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/typing_detection.cc b/chromium/third_party/webrtc/modules/audio_processing/typing_detection.cc
new file mode 100644
index 00000000000..5f5ce0abafd
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/typing_detection.cc
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_processing/typing_detection.h"
+
+namespace webrtc {
+
+TypingDetection::TypingDetection()
+ : time_active_(0),
+ time_since_last_typing_(0),
+ penalty_counter_(0),
+ counter_since_last_detection_update_(0),
+ detection_to_report_(false),
+ new_detection_to_report_(false),
+ time_window_(10),
+ cost_per_typing_(100),
+ reporting_threshold_(300),
+ penalty_decay_(1),
+ type_event_delay_(2),
+ report_detection_update_period_(1) {
+}
+
+TypingDetection::~TypingDetection() {}
+
+bool TypingDetection::Process(bool key_pressed, bool vad_activity) {
+ if (vad_activity)
+ time_active_++;
+ else
+ time_active_ = 0;
+
+  // Keep track of the time since the last typing event
+ if (key_pressed)
+ time_since_last_typing_ = 0;
+ else
+ ++time_since_last_typing_;
+
+ if (time_since_last_typing_ < type_event_delay_ &&
+ vad_activity &&
+ time_active_ < time_window_) {
+ penalty_counter_ += cost_per_typing_;
+ if (penalty_counter_ > reporting_threshold_)
+ new_detection_to_report_ = true;
+ }
+
+ if (penalty_counter_ > 0)
+ penalty_counter_ -= penalty_decay_;
+
+ if (++counter_since_last_detection_update_ ==
+ report_detection_update_period_) {
+ detection_to_report_ = new_detection_to_report_;
+ new_detection_to_report_ = false;
+ counter_since_last_detection_update_ = 0;
+ }
+
+ return detection_to_report_;
+}
+
+int TypingDetection::TimeSinceLastDetectionInSeconds() {
+ // Round to whole seconds.
+ return (time_since_last_typing_ + 50) / 100;
+}
+
+void TypingDetection::SetParameters(int time_window,
+ int cost_per_typing,
+ int reporting_threshold,
+ int penalty_decay,
+ int type_event_delay,
+ int report_detection_update_period) {
+ if (time_window) time_window_ = time_window;
+
+ if (cost_per_typing) cost_per_typing_ = cost_per_typing;
+
+ if (reporting_threshold) reporting_threshold_ = reporting_threshold;
+
+ if (penalty_decay) penalty_decay_ = penalty_decay;
+
+ if (type_event_delay) type_event_delay_ = type_event_delay;
+
+ if (report_detection_update_period)
+ report_detection_update_period_ = report_detection_update_period;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/audio_processing/typing_detection.h b/chromium/third_party/webrtc/modules/audio_processing/typing_detection.h
new file mode 100644
index 00000000000..5fa6456e9e9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/audio_processing/typing_detection.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_
+
+#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class TypingDetection {
+ public:
+ TypingDetection();
+ virtual ~TypingDetection();
+
+  // Run the detection algorithm. Shall be called every 10 ms. Returns true if
+ // typing is detected, or false if not, based on the update period as set with
+ // SetParameters(). See |report_detection_update_period_| description below.
+ bool Process(bool key_pressed, bool vad_activity);
+
+ // Gets the time in seconds since the last detection.
+ int TimeSinceLastDetectionInSeconds();
+
+ // Sets the algorithm parameters. A parameter value of 0 leaves it unchanged.
+  // See the corresponding member variables below for descriptions.
+ void SetParameters(int time_window,
+ int cost_per_typing,
+ int reporting_threshold,
+ int penalty_decay,
+ int type_event_delay,
+ int report_detection_update_period);
+
+ private:
+ int time_active_;
+ int time_since_last_typing_;
+ int penalty_counter_;
+
+ // Counter since last time the detection status reported by Process() was
+ // updated. See also |report_detection_update_period_|.
+ int counter_since_last_detection_update_;
+
+ // The detection status to report. Updated every
+ // |report_detection_update_period_| call to Process().
+ bool detection_to_report_;
+
+ // What |detection_to_report_| should be set to next time it is updated.
+ bool new_detection_to_report_;
+
+ // Settable threshold values.
+
+ // Number of 10 ms slots accepted to count as a hit.
+ int time_window_;
+
+ // Penalty added for a typing + activity coincide.
+ int cost_per_typing_;
+
+ // Threshold for |penalty_counter_|.
+ int reporting_threshold_;
+
+ // How much we reduce |penalty_counter_| every 10 ms.
+ int penalty_decay_;
+
+ // How old typing events we allow.
+ int type_event_delay_;
+
+ // Settable update period.
+
+ // Number of 10 ms slots between each update of the detection status returned
+ // by Process(). This inertia added to the algorithm is usually desirable and
+ // provided so that consumers of the class don't have to implement that
+ // themselves if they don't wish.
+ // If set to 1, each call to Process() will return the detection status for
+ // that 10 ms slot.
+ // If set to N (where N > 1), the detection status returned from Process()
+ // will remain the same until Process() has been called N times. Then, if none
+ // of the last N calls to Process() has detected typing for each respective
+ // 10 ms slot, Process() will return false. If at least one of the last N
+ // calls has detected typing, Process() will return true. And that returned
+ // status will then remain the same until the next N calls have been done.
+ int report_detection_update_period_;
+};
+
+} // namespace webrtc
+
+#endif // #ifndef WEBRTC_MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c
index 6d6e9bc97bb..3b2043267ff 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.c
@@ -30,10 +30,6 @@ static const float kMinHistogramThreshold = 1.5f;
static const int kMinRequiredHits = 10;
static const int kMaxHitsWhenPossiblyNonCausal = 10;
static const int kMaxHitsWhenPossiblyCausal = 1000;
-// TODO(bjornv): Make kMaxDelayDifference a configurable parameter, since it
-// corresponds to the filter length if the delay estimation is used in echo
-// control.
-static const int kMaxDelayDifference = 32;
static const float kQ14Scaling = 1.f / (1 << 14); // Scaling by 2^14 to get Q0.
static const float kFractionSlope = 0.05f;
static const float kMinFractionWhenPossiblyCausal = 0.5f;
@@ -195,8 +191,8 @@ static int HistogramBasedValidation(const BinaryDelayEstimator* self,
// depending on the distance between the |candidate_delay| and |last_delay|.
// TODO(bjornv): How much can we gain by turning the fraction calculation
// into tables?
- if (delay_difference >= kMaxDelayDifference) {
- fraction = 1.f - kFractionSlope * (delay_difference - kMaxDelayDifference);
+ if (delay_difference > self->allowed_offset) {
+ fraction = 1.f - kFractionSlope * (delay_difference - self->allowed_offset);
fraction = (fraction > kMinFractionWhenPossiblyCausal ? fraction :
kMinFractionWhenPossiblyCausal);
} else if (delay_difference < 0) {
@@ -308,6 +304,39 @@ void WebRtc_InitBinaryDelayEstimatorFarend(BinaryDelayEstimatorFarend* self) {
memset(self->far_bit_counts, 0, sizeof(int) * self->history_size);
}
+void WebRtc_SoftResetBinaryDelayEstimatorFarend(
+ BinaryDelayEstimatorFarend* self, int delay_shift) {
+ int abs_shift = abs(delay_shift);
+ int shift_size = 0;
+ int dest_index = 0;
+ int src_index = 0;
+ int padding_index = 0;
+
+ assert(self != NULL);
+ shift_size = self->history_size - abs_shift;
+ assert(shift_size > 0);
+ if (delay_shift == 0) {
+ return;
+ } else if (delay_shift > 0) {
+ dest_index = abs_shift;
+ } else if (delay_shift < 0) {
+ src_index = abs_shift;
+ padding_index = shift_size;
+ }
+
+ // Shift and zero pad buffers.
+ memmove(&self->binary_far_history[dest_index],
+ &self->binary_far_history[src_index],
+ sizeof(*self->binary_far_history) * shift_size);
+ memset(&self->binary_far_history[padding_index], 0,
+ sizeof(*self->binary_far_history) * abs_shift);
+ memmove(&self->far_bit_counts[dest_index],
+ &self->far_bit_counts[src_index],
+ sizeof(*self->far_bit_counts) * shift_size);
+ memset(&self->far_bit_counts[padding_index], 0,
+ sizeof(*self->far_bit_counts) * abs_shift);
+}
+
void WebRtc_AddBinaryFarSpectrum(BinaryDelayEstimatorFarend* handle,
uint32_t binary_far_spectrum) {
assert(handle != NULL);
@@ -349,10 +378,10 @@ void WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* self) {
}
BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
- BinaryDelayEstimatorFarend* farend, int lookahead) {
+ BinaryDelayEstimatorFarend* farend, int max_lookahead) {
BinaryDelayEstimator* self = NULL;
- if ((farend != NULL) && (lookahead >= 0)) {
+ if ((farend != NULL) && (max_lookahead >= 0)) {
// Sanity conditions fulfilled.
self = malloc(sizeof(BinaryDelayEstimator));
}
@@ -361,7 +390,11 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
int malloc_fail = 0;
self->farend = farend;
- self->near_history_size = lookahead + 1;
+ self->near_history_size = max_lookahead + 1;
+ self->robust_validation_enabled = 0; // Disabled by default.
+ self->allowed_offset = 0;
+
+ self->lookahead = max_lookahead;
// Allocate memory for spectrum buffers. The extra array element in
// |mean_bit_counts| and |histogram| is a dummy element only used while
@@ -374,7 +407,7 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
malloc_fail |= (self->bit_counts == NULL);
// Allocate memory for history buffers.
- self->binary_near_history = malloc((lookahead + 1) * sizeof(uint32_t));
+ self->binary_near_history = malloc((max_lookahead + 1) * sizeof(uint32_t));
malloc_fail |= (self->binary_near_history == NULL);
self->histogram = malloc((farend->history_size + 1) * sizeof(float));
@@ -400,26 +433,40 @@ void WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* self) {
self->mean_bit_counts[i] = (20 << 9); // 20 in Q9.
self->histogram[i] = 0.f;
}
- self->minimum_probability = (32 << 9); // 32 in Q9.
- self->last_delay_probability = (32 << 9); // 32 in Q9.
+ self->minimum_probability = kMaxBitCountsQ9; // 32 in Q9.
+ self->last_delay_probability = (int) kMaxBitCountsQ9; // 32 in Q9.
// Default return value if we're unable to estimate. -1 is used for errors.
self->last_delay = -2;
- self->robust_validation_enabled = 0; // Disabled by default.
self->last_candidate_delay = -2;
self->compare_delay = self->farend->history_size;
self->candidate_hits = 0;
self->last_delay_histogram = 0.f;
}
+int WebRtc_SoftResetBinaryDelayEstimator(BinaryDelayEstimator* self,
+ int delay_shift) {
+ int lookahead = 0;
+ assert(self != NULL);
+ lookahead = self->lookahead;
+ self->lookahead -= delay_shift;
+ if (self->lookahead < 0) {
+ self->lookahead = 0;
+ }
+ if (self->lookahead > self->near_history_size - 1) {
+ self->lookahead = self->near_history_size - 1;
+ }
+ return lookahead - self->lookahead;
+}
+
int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self,
uint32_t binary_near_spectrum) {
int i = 0;
int candidate_delay = -1;
int valid_candidate = 0;
- int32_t value_best_candidate = 32 << 9; // 32 in Q9, (max |mean_bit_counts|).
+ int32_t value_best_candidate = kMaxBitCountsQ9;
int32_t value_worst_candidate = 0;
int32_t valley_depth = 0;
@@ -430,8 +477,7 @@ int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self,
memmove(&(self->binary_near_history[1]), &(self->binary_near_history[0]),
(self->near_history_size - 1) * sizeof(uint32_t));
self->binary_near_history[0] = binary_near_spectrum;
- binary_near_spectrum =
- self->binary_near_history[self->near_history_size - 1];
+ binary_near_spectrum = self->binary_near_history[self->lookahead];
}
// Compare with delayed spectra and store the |bit_counts| for each delay.
@@ -547,21 +593,23 @@ int WebRtc_binary_last_delay(BinaryDelayEstimator* self) {
return self->last_delay;
}
-int WebRtc_binary_last_delay_quality(BinaryDelayEstimator* self) {
- int delay_quality = 0;
+float WebRtc_binary_last_delay_quality(BinaryDelayEstimator* self) {
+ float quality = 0;
assert(self != NULL);
- // |last_delay_probability| is the opposite of quality and states how deep the
- // minimum of the cost function is. The value states how many non-matching
- // bits we have between the binary spectra for the corresponding delay
- // estimate. The range is thus from 0 to 32, since we use 32 bits in the
- // binary spectra.
-
- // Return the |delay_quality| = 1 - |last_delay_probability| / 32 (in Q14).
- delay_quality = (32 << 9) - self->last_delay_probability;
- if (delay_quality < 0) {
- delay_quality = 0;
+
+ if (self->robust_validation_enabled) {
+ // Simply a linear function of the histogram height at delay estimate.
+ quality = self->histogram[self->compare_delay] / kHistogramMax;
+ } else {
+ // Note that |last_delay_probability| states how deep the minimum of the
+ // cost function is, so it is rather an error probability.
+ quality = (float) (kMaxBitCountsQ9 - self->last_delay_probability) /
+ kMaxBitCountsQ9;
+ if (quality < 0) {
+ quality = 0;
+ }
}
- return delay_quality;
+ return quality;
}
void WebRtc_MeanEstimatorFix(int32_t new_value,
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.h b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.h
index 7ffb81b8b18..3d5ffce20e9 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator.h
@@ -16,6 +16,8 @@
#include "webrtc/typedefs.h"
+static const int32_t kMaxBitCountsQ9 = (32 << 9); // 32 matching bits in Q9.
+
typedef struct {
// Pointer to bit counts.
int* far_bit_counts;
@@ -44,12 +46,16 @@ typedef struct {
// Robust validation
int robust_validation_enabled;
+ int allowed_offset;
int last_candidate_delay;
int compare_delay;
int candidate_hits;
float* histogram;
float last_delay_histogram;
+ // For dynamically changing the lookahead when using SoftReset...().
+ int lookahead;
+
// Far-end binary spectrum history buffer etc.
BinaryDelayEstimatorFarend* farend;
} BinaryDelayEstimator;
@@ -90,6 +96,15 @@ BinaryDelayEstimatorFarend* WebRtc_CreateBinaryDelayEstimatorFarend(
//
void WebRtc_InitBinaryDelayEstimatorFarend(BinaryDelayEstimatorFarend* self);
+// Soft resets the delay estimation far-end instance created with
+// WebRtc_CreateBinaryDelayEstimatorFarend(...).
+//
+// Input:
+// - delay_shift : The amount of blocks to shift history buffers.
+//
+void WebRtc_SoftResetBinaryDelayEstimatorFarend(
+ BinaryDelayEstimatorFarend* self, int delay_shift);
+
// Adds the binary far-end spectrum to the internal far-end history buffer. This
// spectrum is used as reference when calculating the delay using
// WebRtc_ProcessBinarySpectrum().
@@ -121,38 +136,10 @@ void WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* self);
// Allocates the memory needed by the binary delay estimation. The memory needs
// to be initialized separately through WebRtc_InitBinaryDelayEstimator(...).
//
-// Inputs:
-// - farend : Pointer to the far-end part of the Binary Delay
-// Estimator. This memory has to be created separately
-// prior to this call using
-// WebRtc_CreateBinaryDelayEstimatorFarend().
-//
-// Note that BinaryDelayEstimator does not take
-// ownership of |farend|.
-//
-// - lookahead : Amount of non-causal lookahead to use. This can
-// detect cases in which a near-end signal occurs before
-// the corresponding far-end signal. It will delay the
-// estimate for the current block by an equal amount,
-// and the returned values will be offset by it.
-//
-// A value of zero is the typical no-lookahead case.
-// This also represents the minimum delay which can be
-// estimated.
-//
-// Note that the effective range of delay estimates is
-// [-|lookahead|,... ,|history_size|-|lookahead|)
-// where |history_size| was set upon creating the far-end
-// history buffer size.
-//
-// Return value:
-// - BinaryDelayEstimator*
-// : Created |handle|. If the memory can't be allocated
-// or if any of the input parameters are invalid NULL
-// is returned.
-//
+// See WebRtc_CreateDelayEstimator(..) in delay_estimator_wrapper.c for detailed
+// description.
BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
- BinaryDelayEstimatorFarend* farend, int lookahead);
+ BinaryDelayEstimatorFarend* farend, int max_lookahead);
// Initializes the delay estimation instance created with
// WebRtc_CreateBinaryDelayEstimator(...).
@@ -165,6 +152,18 @@ BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(
//
void WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* self);
+// Soft resets the delay estimation instance created with
+// WebRtc_CreateBinaryDelayEstimator(...).
+//
+// Input:
+// - delay_shift : The amount of blocks to shift history buffers.
+//
+// Return value:
+// - actual_shifts : The actual number of shifts performed.
+//
+int WebRtc_SoftResetBinaryDelayEstimator(BinaryDelayEstimator* self,
+ int delay_shift);
+
// Estimates and returns the delay between the binary far-end and binary near-
// end spectra. It is assumed the binary far-end spectrum has been added using
// WebRtc_AddBinaryFarSpectrum() prior to this call. The value will be offset by
@@ -199,17 +198,12 @@ int WebRtc_binary_last_delay(BinaryDelayEstimator* self);
// Returns the estimation quality of the last calculated delay updated by the
// function WebRtc_ProcessBinarySpectrum(...). The estimation quality is a value
-// in the interval [0, 1] in Q14. The higher the value, the better quality.
-//
-// Input:
-// - self : Pointer to the delay estimation instance.
+// in the interval [0, 1]. The higher the value, the better the quality.
//
// Return value:
-// - delay_quality : >= 0 - Estimation quality (in Q14) of last
-// calculated delay value.
-// -2 - Insufficient data for estimation.
-//
-int WebRtc_binary_last_delay_quality(BinaryDelayEstimator* self);
+// - delay_quality : >= 0 - Estimation quality of last calculated
+// delay value.
+float WebRtc_binary_last_delay_quality(BinaryDelayEstimator* self);
// Updates the |mean_value| recursively with a step size of 2^-|factor|. This
// function is used internally in the Binary Delay Estimator as well as the
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
index bdc199cafbb..ca0901d6db2 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_unittest.cc
@@ -26,6 +26,9 @@ enum { kLookahead = 10 };
// Length of binary spectrum sequence.
enum { kSequenceLength = 400 };
+const int kEnable[] = { 0, 1 };
+const size_t kSizeEnable = sizeof(kEnable) / sizeof(*kEnable);
+
class DelayEstimatorTest : public ::testing::Test {
protected:
DelayEstimatorTest();
@@ -38,7 +41,8 @@ class DelayEstimatorTest : public ::testing::Test {
void RunBinarySpectra(BinaryDelayEstimator* binary1,
BinaryDelayEstimator* binary2,
int near_offset, int lookahead_offset, int far_offset);
- void RunBinarySpectraTest(int near_offset, int lookahead_offset);
+ void RunBinarySpectraTest(int near_offset, int lookahead_offset,
+ int ref_robust_validation, int robust_validation);
void* handle_;
DelayEstimator* self_;
@@ -113,7 +117,7 @@ void DelayEstimatorTest::Init() {
EXPECT_EQ(0, farend_self_->far_spectrum_initialized);
EXPECT_EQ(0, self_->near_spectrum_initialized);
EXPECT_EQ(-2, WebRtc_last_delay(handle_)); // Delay in initial state.
- EXPECT_EQ(0, WebRtc_last_delay_quality(handle_)); // Zero quality.
+ EXPECT_FLOAT_EQ(0, WebRtc_last_delay_quality(handle_)); // Zero quality.
}
void DelayEstimatorTest::InitBinary() {
@@ -143,6 +147,8 @@ void DelayEstimatorTest::RunBinarySpectra(BinaryDelayEstimator* binary1,
int near_offset,
int lookahead_offset,
int far_offset) {
+ int different_validations = binary1->robust_validation_enabled ^
+ binary2->robust_validation_enabled;
WebRtc_InitBinaryDelayEstimatorFarend(binary_farend_);
WebRtc_InitBinaryDelayEstimator(binary1);
WebRtc_InitBinaryDelayEstimator(binary2);
@@ -167,19 +173,32 @@ void DelayEstimatorTest::RunBinarySpectra(BinaryDelayEstimator* binary1,
if ((delay_1 != -2) && (delay_2 != -2)) {
EXPECT_EQ(delay_1, delay_2 - lookahead_offset - near_offset);
}
+ // For the case of identical signals |delay_1| and |delay_2| should match
+ // all the time, unless one of them has robust validation turned on. In
+ // that case the robust validation leaves the initial state faster.
if ((near_offset == 0) && (lookahead_offset == 0)) {
- EXPECT_EQ(delay_1, delay_2);
+ if (!different_validations) {
+ EXPECT_EQ(delay_1, delay_2);
+ } else {
+ if (binary1->robust_validation_enabled) {
+ EXPECT_GE(delay_1, delay_2);
+ } else {
+ EXPECT_GE(delay_2, delay_1);
+ }
+ }
}
}
// Verify that we have left the initialized state.
EXPECT_NE(-2, WebRtc_binary_last_delay(binary1));
- EXPECT_NE(0, WebRtc_binary_last_delay_quality(binary1));
+ EXPECT_LT(0, WebRtc_binary_last_delay_quality(binary1));
EXPECT_NE(-2, WebRtc_binary_last_delay(binary2));
- EXPECT_NE(0, WebRtc_binary_last_delay_quality(binary2));
+ EXPECT_LT(0, WebRtc_binary_last_delay_quality(binary2));
}
void DelayEstimatorTest::RunBinarySpectraTest(int near_offset,
- int lookahead_offset) {
+ int lookahead_offset,
+ int ref_robust_validation,
+ int robust_validation) {
BinaryDelayEstimator* binary2 =
WebRtc_CreateBinaryDelayEstimator(binary_farend_,
kLookahead + lookahead_offset);
@@ -187,6 +206,8 @@ void DelayEstimatorTest::RunBinarySpectraTest(int near_offset,
// the delay is equivalent with a positive |offset| of the far-end sequence.
// For non-causal systems the delay is equivalent with a negative |offset| of
// the far-end sequence.
+ binary_->robust_validation_enabled = ref_robust_validation;
+ binary2->robust_validation_enabled = robust_validation;
for (int offset = -kLookahead;
offset < kMaxDelay - lookahead_offset - near_offset;
offset++) {
@@ -194,6 +215,7 @@ void DelayEstimatorTest::RunBinarySpectraTest(int near_offset,
}
WebRtc_FreeBinaryDelayEstimator(binary2);
binary2 = NULL;
+ binary_->robust_validation_enabled = 0; // Reset reference.
}
TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
@@ -206,14 +228,12 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
void* handle = farend_handle_;
handle = WebRtc_CreateDelayEstimatorFarend(33, kMaxDelay + kLookahead);
EXPECT_TRUE(handle == NULL);
- handle = farend_handle_;
handle = WebRtc_CreateDelayEstimatorFarend(kSpectrumSize, 1);
EXPECT_TRUE(handle == NULL);
handle = handle_;
handle = WebRtc_CreateDelayEstimator(NULL, kLookahead);
EXPECT_TRUE(handle == NULL);
- handle = handle_;
handle = WebRtc_CreateDelayEstimator(farend_handle_, -1);
EXPECT_TRUE(handle == NULL);
@@ -246,10 +266,18 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
EXPECT_EQ(-1, WebRtc_AddFarSpectrumFix(farend_handle_, far_u16_,
spectrum_size_, 16));
+ // WebRtc_set_allowed_offset() should return -1 if we have:
+ // 1) NULL pointer as |handle|.
+ // 2) |allowed_offset| < 0.
+ EXPECT_EQ(-1, WebRtc_set_allowed_offset(NULL, 0));
+ EXPECT_EQ(-1, WebRtc_set_allowed_offset(handle_, -1));
+
+ EXPECT_EQ(-1, WebRtc_get_allowed_offset(NULL));
+
// WebRtc_enable_robust_validation() should return -1 if we have:
// 1) NULL pointer as |handle|.
// 2) Incorrect |enable| value (not 0 or 1).
- EXPECT_EQ(-1, WebRtc_enable_robust_validation(NULL, 0));
+ EXPECT_EQ(-1, WebRtc_enable_robust_validation(NULL, kEnable[0]));
EXPECT_EQ(-1, WebRtc_enable_robust_validation(handle_, -1));
EXPECT_EQ(-1, WebRtc_enable_robust_validation(handle_, 2));
@@ -286,21 +314,31 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
// WebRtc_last_delay() should return -1 if we have a NULL pointer as |handle|.
EXPECT_EQ(-1, WebRtc_last_delay(NULL));
- // WebRtc_last_delay_quality() should return -1 if we have a NULL pointer as
- // |handle|.
- EXPECT_EQ(-1, WebRtc_last_delay_quality(NULL));
-
// Free any local memory if needed.
WebRtc_FreeDelayEstimator(handle);
}
+TEST_F(DelayEstimatorTest, VerifyAllowedOffset) {
+ // Is set to zero by default.
+ EXPECT_EQ(0, WebRtc_get_allowed_offset(handle_));
+ for (int i = 1; i >= 0; i--) {
+ EXPECT_EQ(0, WebRtc_set_allowed_offset(handle_, i));
+ EXPECT_EQ(i, WebRtc_get_allowed_offset(handle_));
+ Init();
+ // Unaffected over a reset.
+ EXPECT_EQ(i, WebRtc_get_allowed_offset(handle_));
+ }
+}
+
TEST_F(DelayEstimatorTest, VerifyEnableRobustValidation) {
- Init();
// Disabled by default.
EXPECT_EQ(0, WebRtc_is_robust_validation_enabled(handle_));
- for (int i = 1; i >= 0; i--) {
- EXPECT_EQ(0, WebRtc_enable_robust_validation(handle_, i));
- EXPECT_EQ(i, WebRtc_is_robust_validation_enabled(handle_));
+ for (size_t i = 0; i < kSizeEnable; ++i) {
+ EXPECT_EQ(0, WebRtc_enable_robust_validation(handle_, kEnable[i]));
+ EXPECT_EQ(kEnable[i], WebRtc_is_robust_validation_enabled(handle_));
+ Init();
+ // Unaffected over a reset.
+ EXPECT_EQ(kEnable[i], WebRtc_is_robust_validation_enabled(handle_));
}
}
@@ -335,6 +373,7 @@ TEST_F(DelayEstimatorTest, CorrectLastDelay) {
// (|last_delay| = -2). Then we compare the Process() output with the
// last_delay() call.
+ // TODO(bjornv): Update quality values for robust validation.
int last_delay = 0;
// Floating point operations.
Init();
@@ -345,13 +384,16 @@ TEST_F(DelayEstimatorTest, CorrectLastDelay) {
spectrum_size_);
if (last_delay != -2) {
EXPECT_EQ(last_delay, WebRtc_last_delay(handle_));
- EXPECT_EQ(7203, WebRtc_last_delay_quality(handle_));
+ if (!WebRtc_is_robust_validation_enabled(handle_)) {
+ EXPECT_FLOAT_EQ(7203.f / kMaxBitCountsQ9,
+ WebRtc_last_delay_quality(handle_));
+ }
break;
}
}
// Verify that we have left the initialized state.
EXPECT_NE(-2, WebRtc_last_delay(handle_));
- EXPECT_NE(0, WebRtc_last_delay_quality(handle_));
+ EXPECT_LT(0, WebRtc_last_delay_quality(handle_));
// Fixed point operations.
Init();
@@ -362,13 +404,16 @@ TEST_F(DelayEstimatorTest, CorrectLastDelay) {
spectrum_size_, 0);
if (last_delay != -2) {
EXPECT_EQ(last_delay, WebRtc_last_delay(handle_));
- EXPECT_EQ(7203, WebRtc_last_delay_quality(handle_));
+ if (!WebRtc_is_robust_validation_enabled(handle_)) {
+ EXPECT_FLOAT_EQ(7203.f / kMaxBitCountsQ9,
+ WebRtc_last_delay_quality(handle_));
+ }
break;
}
}
// Verify that we have left the initialized state.
EXPECT_NE(-2, WebRtc_last_delay(handle_));
- EXPECT_NE(0, WebRtc_last_delay_quality(handle_));
+ EXPECT_LT(0, WebRtc_last_delay_quality(handle_));
}
TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfBinaryEstimatorFarend) {
@@ -391,18 +436,14 @@ TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfBinaryEstimator) {
BinaryDelayEstimator* binary_handle = binary_;
// WebRtc_CreateBinaryDelayEstimator() should return -1 if we have a NULL
- // pointer as |binary_handle| or invalid input values. Upon failure, the
+ // pointer as |binary_farend| or invalid input values. Upon failure, the
// |binary_handle| should be NULL.
// Make sure we have a non-NULL value at start, so we can detect NULL after
// create failure.
binary_handle = WebRtc_CreateBinaryDelayEstimator(NULL, kLookahead);
EXPECT_TRUE(binary_handle == NULL);
- binary_handle = binary_;
binary_handle = WebRtc_CreateBinaryDelayEstimator(binary_farend_, -1);
EXPECT_TRUE(binary_handle == NULL);
- binary_handle = binary_;
- binary_handle = WebRtc_CreateBinaryDelayEstimator(0, 0);
- EXPECT_TRUE(binary_handle == NULL);
}
TEST_F(DelayEstimatorTest, MeanEstimatorFix) {
@@ -431,26 +472,70 @@ TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearSameSpectrum) {
// the signal accordingly. We create two Binary Delay Estimators and feed them
// with the same signals, so they should output the same results.
// We verify both causal and non-causal delays.
+ // For these noise free signals, the robust validation should not have an
+ // impact, hence we turn robust validation on/off for both reference and
+ // delayed near end.
- RunBinarySpectraTest(0, 0);
+ for (size_t i = 0; i < kSizeEnable; ++i) {
+ for (size_t j = 0; j < kSizeEnable; ++j) {
+ RunBinarySpectraTest(0, 0, kEnable[i], kEnable[j]);
+ }
+ }
}
TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearDifferentSpectrum) {
// In this test we use the same setup as above, but we now feed the two Binary
// Delay Estimators with different signals, so they should output different
// results.
+ // For these noise free signals, the robust validation should not have an
+ // impact, hence we turn robust validation on/off for both reference and
+ // delayed near end.
const int kNearOffset = 1;
- RunBinarySpectraTest(kNearOffset, 0);
+ for (size_t i = 0; i < kSizeEnable; ++i) {
+ for (size_t j = 0; j < kSizeEnable; ++j) {
+ RunBinarySpectraTest(kNearOffset, 0, kEnable[i], kEnable[j]);
+ }
+ }
}
TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearDifferentLookahead) {
// In this test we use the same setup as above, feeding the two Binary
// Delay Estimators with the same signals. The difference is that we create
// them with different lookahead.
+ // For these noise free signals, the robust validation should not have an
+ // impact, hence we turn robust validation on/off for both reference and
+ // delayed near end.
const int kLookaheadOffset = 1;
- RunBinarySpectraTest(0, kLookaheadOffset);
+ for (size_t i = 0; i < kSizeEnable; ++i) {
+ for (size_t j = 0; j < kSizeEnable; ++j) {
+ RunBinarySpectraTest(0, kLookaheadOffset, kEnable[i], kEnable[j]);
+ }
+ }
}
+TEST_F(DelayEstimatorTest, AllowedOffsetNoImpactWhenRobustValidationDisabled) {
+ // The same setup as in ExactDelayEstimateMultipleNearSameSpectrum with the
+ // difference that |allowed_offset| is set for the reference binary delay
+ // estimator.
+
+ binary_->allowed_offset = 10;
+ RunBinarySpectraTest(0, 0, 0, 0);
+ binary_->allowed_offset = 0; // Reset reference.
+}
+
+TEST_F(DelayEstimatorTest, VerifyLookaheadAtCreate) {
+ void* farend_handle = WebRtc_CreateDelayEstimatorFarend(kSpectrumSize,
+ kMaxDelay);
+ ASSERT_TRUE(farend_handle != NULL);
+ void* handle = WebRtc_CreateDelayEstimator(farend_handle, kLookahead);
+ ASSERT_TRUE(handle != NULL);
+ EXPECT_EQ(kLookahead, WebRtc_lookahead(handle));
+ WebRtc_FreeDelayEstimator(handle);
+ WebRtc_FreeDelayEstimatorFarend(farend_handle);
+}
+
+// TODO(bjornv): Add tests for SoftReset...(...).
+
} // namespace
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c
index ce4431844ee..6ec894e65e9 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.c
@@ -191,6 +191,12 @@ int WebRtc_InitDelayEstimatorFarend(void* handle) {
return 0;
}
+void WebRtc_SoftResetDelayEstimatorFarend(void* handle, int delay_shift) {
+ DelayEstimatorFarend* self = (DelayEstimatorFarend*) handle;
+ assert(self != NULL);
+ WebRtc_SoftResetBinaryDelayEstimatorFarend(self->binary_farend, delay_shift);
+}
+
int WebRtc_AddFarSpectrumFix(void* handle, uint16_t* far_spectrum,
int spectrum_size, int far_q) {
DelayEstimatorFarend* self = (DelayEstimatorFarend*) handle;
@@ -261,7 +267,7 @@ void WebRtc_FreeDelayEstimator(void* handle) {
free(self);
}
-void* WebRtc_CreateDelayEstimator(void* farend_handle, int lookahead) {
+void* WebRtc_CreateDelayEstimator(void* farend_handle, int max_lookahead) {
DelayEstimator* self = NULL;
DelayEstimatorFarend* farend = (DelayEstimatorFarend*) farend_handle;
@@ -274,7 +280,7 @@ void* WebRtc_CreateDelayEstimator(void* farend_handle, int lookahead) {
// Allocate memory for the farend spectrum handling.
self->binary_handle =
- WebRtc_CreateBinaryDelayEstimator(farend->binary_farend, lookahead);
+ WebRtc_CreateBinaryDelayEstimator(farend->binary_farend, max_lookahead);
memory_fail |= (self->binary_handle == NULL);
// Allocate memory for spectrum buffers.
@@ -312,6 +318,50 @@ int WebRtc_InitDelayEstimator(void* handle) {
return 0;
}
+int WebRtc_SoftResetDelayEstimator(void* handle, int delay_shift) {
+ DelayEstimator* self = (DelayEstimator*) handle;
+ assert(self != NULL);
+ return WebRtc_SoftResetBinaryDelayEstimator(self->binary_handle, delay_shift);
+}
+
+int WebRtc_set_lookahead(void* handle, int lookahead) {
+ DelayEstimator* self = (DelayEstimator*) handle;
+ assert(self != NULL);
+ assert(self->binary_handle != NULL);
+ if ((lookahead > self->binary_handle->near_history_size - 1) ||
+ (lookahead < 0)) {
+ return -1;
+ }
+ self->binary_handle->lookahead = lookahead;
+ return self->binary_handle->lookahead;
+}
+
+int WebRtc_lookahead(void* handle) {
+ DelayEstimator* self = (DelayEstimator*) handle;
+ assert(self != NULL);
+ assert(self->binary_handle != NULL);
+ return self->binary_handle->lookahead;
+}
+
+int WebRtc_set_allowed_offset(void* handle, int allowed_offset) {
+ DelayEstimator* self = (DelayEstimator*) handle;
+
+ if ((self == NULL) || (allowed_offset < 0)) {
+ return -1;
+ }
+ self->binary_handle->allowed_offset = allowed_offset;
+ return 0;
+}
+
+int WebRtc_get_allowed_offset(const void* handle) {
+ const DelayEstimator* self = (const DelayEstimator*) handle;
+
+ if (self == NULL) {
+ return -1;
+ }
+ return self->binary_handle->allowed_offset;
+}
+
int WebRtc_enable_robust_validation(void* handle, int enable) {
DelayEstimator* self = (DelayEstimator*) handle;
@@ -326,13 +376,12 @@ int WebRtc_enable_robust_validation(void* handle, int enable) {
return 0;
}
-int WebRtc_is_robust_validation_enabled(void* handle) {
- DelayEstimator* self = (DelayEstimator*) handle;
+int WebRtc_is_robust_validation_enabled(const void* handle) {
+ const DelayEstimator* self = (const DelayEstimator*) handle;
if (self == NULL) {
return -1;
}
- assert(self->binary_handle != NULL);
return self->binary_handle->robust_validation_enabled;
}
@@ -403,12 +452,8 @@ int WebRtc_last_delay(void* handle) {
return WebRtc_binary_last_delay(self->binary_handle);
}
-int WebRtc_last_delay_quality(void* handle) {
+float WebRtc_last_delay_quality(void* handle) {
DelayEstimator* self = (DelayEstimator*) handle;
-
- if (self == NULL) {
- return -1;
- }
-
+ assert(self != NULL);
return WebRtc_binary_last_delay_quality(self->binary_handle);
}
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h
index 50bcddeddc2..13e86bdd438 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/delay_estimator_wrapper.h
@@ -52,6 +52,13 @@ void* WebRtc_CreateDelayEstimatorFarend(int spectrum_size, int history_size);
//
int WebRtc_InitDelayEstimatorFarend(void* handle);
+// Soft resets the far-end part of the delay estimation instance returned by
+// WebRtc_CreateDelayEstimatorFarend(...).
+// Input:
+// - delay_shift : The amount of blocks to shift history buffers.
+//
+void WebRtc_SoftResetDelayEstimatorFarend(void* handle, int delay_shift);
+
// Adds the far-end spectrum to the far-end history buffer. This spectrum is
// used as reference when calculating the delay using
// WebRtc_ProcessSpectrum().
@@ -91,11 +98,18 @@ void WebRtc_FreeDelayEstimator(void* handle);
// ownership of |farend_handle|, which has to be torn
// down properly after this instance.
//
-// - lookahead : Amount of non-causal lookahead to use. This can
-// detect cases in which a near-end signal occurs before
-// the corresponding far-end signal. It will delay the
-// estimate for the current block by an equal amount,
-// and the returned values will be offset by it.
+// - max_lookahead : Maximum amount of non-causal lookahead allowed. The
+// actual amount of lookahead used can be controlled by
+// WebRtc_set_lookahead(...). The default |lookahead| is
+// set to |max_lookahead| at create time. Use
+// WebRtc_set_lookahead(...) before start if a different
+// value is desired.
+//
+// Using lookahead can detect cases in which a near-end
+// signal occurs before the corresponding far-end signal.
+// It will delay the estimate for the current block by an
+// equal amount, and the returned values will be offset
+// by it.
//
// A value of zero is the typical no-lookahead case.
// This also represents the minimum delay which can be
@@ -111,7 +125,7 @@ void WebRtc_FreeDelayEstimator(void* handle);
// if any of the input parameters are invalid NULL is
// returned.
//
-void* WebRtc_CreateDelayEstimator(void* farend_handle, int lookahead);
+void* WebRtc_CreateDelayEstimator(void* farend_handle, int max_lookahead);
// Initializes the delay estimation instance returned by
// WebRtc_CreateDelayEstimator(...)
@@ -123,17 +137,59 @@ void* WebRtc_CreateDelayEstimator(void* farend_handle, int lookahead);
//
int WebRtc_InitDelayEstimator(void* handle);
+// Soft resets the delay estimation instance returned by
+// WebRtc_CreateDelayEstimator(...)
+// Input:
+// - delay_shift : The amount of blocks to shift history buffers.
+//
+// Return value:
+// - actual_shifts : The actual number of shifts performed.
+//
+int WebRtc_SoftResetDelayEstimator(void* handle, int delay_shift);
+
+// Sets the amount of |lookahead| to use. Valid values are [0, max_lookahead]
+// where |max_lookahead| was set at create time through
+// WebRtc_CreateDelayEstimator(...).
+//
+// Input:
+// - lookahead : The amount of blocks to shift history buffers.
+//
+// Return value:
+// - new_lookahead : The actual number of shifts performed.
+//
+int WebRtc_set_lookahead(void* handle, int lookahead);
+
+// Returns the amount of lookahead we currently use.
+int WebRtc_lookahead(void* handle);
+
+// Sets the |allowed_offset| used in the robust validation scheme. If the
+// delay estimator is used in an echo control component, this parameter is
+// related to the filter length. In principle |allowed_offset| should be set to
+// the echo control filter length minus the expected echo duration, i.e., the
+// delay offset the echo control can handle without quality regression. The
+// default value, used if not set manually, is zero. Note that |allowed_offset|
+// has to be non-negative.
+// Inputs:
+// - handle : Pointer to the delay estimation instance.
+// - allowed_offset : The amount of delay offset, measured in partitions,
+// the echo control filter can handle.
+int WebRtc_set_allowed_offset(void* handle, int allowed_offset);
+
+// Returns the |allowed_offset| in number of partitions.
+int WebRtc_get_allowed_offset(const void* handle);
+
// TODO(bjornv): Implement this functionality. Currently, enabling it has no
// impact, hence this is an empty API.
// Enables/Disables a robust validation functionality in the delay estimation.
-// This is by default disabled upon initialization.
+// This is by default set to disabled at create time. The state is preserved
+// over a reset.
// Inputs:
// - handle : Pointer to the delay estimation instance.
// - enable : Enable (1) or disable (0) this feature.
int WebRtc_enable_robust_validation(void* handle, int enable);
// Returns 1 if robust validation is enabled and 0 if disabled.
-int WebRtc_is_robust_validation_enabled(void* handle);
+int WebRtc_is_robust_validation_enabled(const void* handle);
// Estimates and returns the delay between the far-end and near-end blocks. The
// value will be offset by the lookahead (i.e. the lookahead should be
@@ -179,18 +235,11 @@ int WebRtc_last_delay(void* handle);
// Returns the estimation quality/probability of the last calculated delay
// updated by the function WebRtc_DelayEstimatorProcess(...). The estimation
-// quality is a value in the interval [0, 1] in Q9. The higher the value, the
-// better quality.
-//
-// Input:
-// - handle : Pointer to the delay estimation instance.
+// quality is a value in the interval [0, 1]. The higher the value, the better
+// the quality.
//
// Return value:
-// - delay_quality : >= 0 - Estimation quality (in Q9) of last calculated
-// delay value.
-// -1 - Error.
-// -2 - Insufficient data for estimation.
-//
-int WebRtc_last_delay_quality(void* handle);
+// - delay_quality : >= 0 - Estimation quality of last calculated delay.
+float WebRtc_last_delay_quality(void* handle);
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_WRAPPER_H_
diff --git a/chromium/third_party/webrtc/modules/audio_processing/utility/ring_buffer_unittest.cc b/chromium/third_party/webrtc/modules/audio_processing/utility/ring_buffer_unittest.cc
index 2b7634dd073..5dacf0b804c 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/utility/ring_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/utility/ring_buffer_unittest.cc
@@ -22,7 +22,12 @@ extern "C" {
namespace webrtc {
-typedef scoped_ptr_malloc<RingBuffer, WebRtc_FreeBuffer> scoped_ring_buffer;
+struct FreeBufferDeleter {
+ inline void operator()(void* ptr) const {
+ WebRtc_FreeBuffer(ptr);
+ }
+};
+typedef scoped_ptr<RingBuffer, FreeBufferDeleter> scoped_ring_buffer;
static void AssertElementEq(int expected, int actual) {
ASSERT_EQ(expected, actual);
@@ -56,8 +61,8 @@ static void RandomStressTest(int** data_ptr) {
srand(seed);
for (int i = 0; i < kNumTests; i++) {
const int buffer_size = std::max(rand() % kMaxBufferSize, 1);
- scoped_array<int> write_data(new int[buffer_size]);
- scoped_array<int> read_data(new int[buffer_size]);
+ scoped_ptr<int[]> write_data(new int[buffer_size]);
+ scoped_ptr<int[]> read_data(new int[buffer_size]);
scoped_ring_buffer buffer(WebRtc_CreateBuffer(buffer_size, sizeof(int)));
ASSERT_TRUE(buffer.get() != NULL);
ASSERT_EQ(0, WebRtc_InitBuffer(buffer.get()));
diff --git a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
index d41547c8022..c6e497ffa3e 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
+++ b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.cc
@@ -13,10 +13,8 @@
#include <assert.h>
#include "webrtc/common_audio/vad/include/webrtc_vad.h"
-#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-
#include "webrtc/modules/audio_processing/audio_buffer.h"
-#include "webrtc/modules/audio_processing/audio_processing_impl.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
namespace webrtc {
@@ -39,9 +37,11 @@ int MapSetting(VoiceDetection::Likelihood likelihood) {
}
} // namespace
-VoiceDetectionImpl::VoiceDetectionImpl(const AudioProcessingImpl* apm)
- : ProcessingComponent(apm),
+VoiceDetectionImpl::VoiceDetectionImpl(const AudioProcessing* apm,
+ CriticalSectionWrapper* crit)
+ : ProcessingComponent(),
apm_(apm),
+ crit_(crit),
stream_has_voice_(false),
using_external_vad_(false),
likelihood_(kLowLikelihood),
@@ -61,7 +61,7 @@ int VoiceDetectionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
assert(audio->samples_per_split_channel() <= 160);
- int16_t* mixed_data = audio->low_pass_split_data(0);
+ const int16_t* mixed_data = audio->low_pass_split_data(0);
if (audio->num_channels() > 1) {
audio->CopyAndMixLowPass(1);
mixed_data = audio->mixed_low_pass_data(0);
@@ -70,7 +70,7 @@ int VoiceDetectionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
// TODO(ajm): concatenate data in frame buffer here.
int vad_ret = WebRtcVad_Process(static_cast<Handle*>(handle(0)),
- apm_->split_sample_rate_hz(),
+ apm_->proc_split_sample_rate_hz(),
mixed_data,
frame_size_samples_);
if (vad_ret == 0) {
@@ -87,7 +87,7 @@ int VoiceDetectionImpl::ProcessCaptureAudio(AudioBuffer* audio) {
}
int VoiceDetectionImpl::Enable(bool enable) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
return EnableComponent(enable);
}
@@ -108,7 +108,7 @@ bool VoiceDetectionImpl::stream_has_voice() const {
}
int VoiceDetectionImpl::set_likelihood(VoiceDetection::Likelihood likelihood) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
if (MapSetting(likelihood) == -1) {
return apm_->kBadParameterError;
}
@@ -122,7 +122,7 @@ VoiceDetection::Likelihood VoiceDetectionImpl::likelihood() const {
}
int VoiceDetectionImpl::set_frame_size_ms(int size) {
- CriticalSectionScoped crit_scoped(apm_->crit());
+ CriticalSectionScoped crit_scoped(crit_);
assert(size == 10); // TODO(ajm): remove when supported.
if (size != 10 &&
size != 20 &&
@@ -146,7 +146,8 @@ int VoiceDetectionImpl::Initialize() {
}
using_external_vad_ = false;
- frame_size_samples_ = frame_size_ms_ * (apm_->split_sample_rate_hz() / 1000);
+ frame_size_samples_ = frame_size_ms_ *
+ apm_->proc_split_sample_rate_hz() / 1000;
  // TODO(ajm): initialize frame buffer here.
return apm_->kNoError;
@@ -163,8 +164,8 @@ void* VoiceDetectionImpl::CreateHandle() const {
return handle;
}
-int VoiceDetectionImpl::DestroyHandle(void* handle) const {
- return WebRtcVad_Free(static_cast<Handle*>(handle));
+void VoiceDetectionImpl::DestroyHandle(void* handle) const {
+ WebRtcVad_Free(static_cast<Handle*>(handle));
}
int VoiceDetectionImpl::InitializeHandle(void* handle) const {
diff --git a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.h b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.h
index f8f50e8493c..1dfdf20ae92 100644
--- a/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.h
+++ b/chromium/third_party/webrtc/modules/audio_processing/voice_detection_impl.h
@@ -15,13 +15,14 @@
#include "webrtc/modules/audio_processing/processing_component.h"
namespace webrtc {
-class AudioProcessingImpl;
+
class AudioBuffer;
+class CriticalSectionWrapper;
class VoiceDetectionImpl : public VoiceDetection,
public ProcessingComponent {
public:
- explicit VoiceDetectionImpl(const AudioProcessingImpl* apm);
+ VoiceDetectionImpl(const AudioProcessing* apm, CriticalSectionWrapper* crit);
virtual ~VoiceDetectionImpl();
int ProcessCaptureAudio(AudioBuffer* audio);
@@ -46,11 +47,12 @@ class VoiceDetectionImpl : public VoiceDetection,
virtual void* CreateHandle() const OVERRIDE;
virtual int InitializeHandle(void* handle) const OVERRIDE;
virtual int ConfigureHandle(void* handle) const OVERRIDE;
- virtual int DestroyHandle(void* handle) const OVERRIDE;
+ virtual void DestroyHandle(void* handle) const OVERRIDE;
virtual int num_handles_required() const OVERRIDE;
virtual int GetHandleError(void* handle) const OVERRIDE;
- const AudioProcessingImpl* apm_;
+ const AudioProcessing* apm_;
+ CriticalSectionWrapper* crit_;
bool stream_has_voice_;
bool using_external_vad_;
Likelihood likelihood_;
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/OWNERS b/chromium/third_party/webrtc/modules/bitrate_controller/OWNERS
index 6c7028550b6..cce3a26bc5e 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/OWNERS
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/OWNERS
@@ -3,3 +3,8 @@ stefan@webrtc.org
henrik.lundin@webrtc.org
mflodman@webrtc.org
asapersson@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
index c2c8616559b..cff5dd18545 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.cc
@@ -18,7 +18,8 @@
namespace webrtc {
-class RtcpBandwidthObserverImpl : public RtcpBandwidthObserver {
+class BitrateControllerImpl::RtcpBandwidthObserverImpl
+ : public RtcpBandwidthObserver {
public:
explicit RtcpBandwidthObserverImpl(BitrateControllerImpl* owner)
: owner_(owner) {
@@ -76,94 +77,29 @@ class RtcpBandwidthObserverImpl : public RtcpBandwidthObserver {
BitrateControllerImpl* owner_;
};
-class LowRateStrategy {
- public:
- LowRateStrategy(
- SendSideBandwidthEstimation* bandwidth_estimation,
- BitrateControllerImpl::BitrateObserverConfList* bitrate_observers)
- : bandwidth_estimation_(bandwidth_estimation),
- bitrate_observers_(bitrate_observers) {}
-
- virtual ~LowRateStrategy() {}
-
- virtual void LowRateAllocation(uint32_t bitrate,
- uint8_t fraction_loss,
- uint32_t rtt,
- uint32_t sum_min_bitrates) = 0;
-
- protected:
- SendSideBandwidthEstimation* bandwidth_estimation_;
- BitrateControllerImpl::BitrateObserverConfList* bitrate_observers_;
-};
-
-class EnforceMinRateStrategy : public LowRateStrategy {
- public:
- EnforceMinRateStrategy(
- SendSideBandwidthEstimation* bandwidth_estimation,
- BitrateControllerImpl::BitrateObserverConfList* bitrate_observers)
- : LowRateStrategy(bandwidth_estimation, bitrate_observers) {}
-
- void LowRateAllocation(uint32_t bitrate,
- uint8_t fraction_loss,
- uint32_t rtt,
- uint32_t sum_min_bitrates) {
- // Min bitrate to all observers.
- BitrateControllerImpl::BitrateObserverConfList::iterator it;
- for (it = bitrate_observers_->begin(); it != bitrate_observers_->end();
- ++it) {
- it->first->OnNetworkChanged(it->second->min_bitrate_, fraction_loss,
- rtt);
- }
- // Set sum of min to current send bitrate.
- bandwidth_estimation_->SetSendBitrate(sum_min_bitrates);
- }
-};
-
-class NoEnforceMinRateStrategy : public LowRateStrategy {
- public:
- NoEnforceMinRateStrategy(
- SendSideBandwidthEstimation* bandwidth_estimation,
- BitrateControllerImpl::BitrateObserverConfList* bitrate_observers)
- : LowRateStrategy(bandwidth_estimation, bitrate_observers) {}
-
- void LowRateAllocation(uint32_t bitrate,
- uint8_t fraction_loss,
- uint32_t rtt,
- uint32_t sum_min_bitrates) {
- // Allocate up to |min_bitrate_| to one observer at a time, until
- // |bitrate| is depleted.
- uint32_t remainder = bitrate;
- BitrateControllerImpl::BitrateObserverConfList::iterator it;
- for (it = bitrate_observers_->begin(); it != bitrate_observers_->end();
- ++it) {
- uint32_t allocation = std::min(remainder, it->second->min_bitrate_);
- it->first->OnNetworkChanged(allocation, fraction_loss, rtt);
- remainder -= allocation;
- }
- // Set |bitrate| to current send bitrate.
- bandwidth_estimation_->SetSendBitrate(bitrate);
- }
-};
-
BitrateController* BitrateController::CreateBitrateController(
+ Clock* clock,
bool enforce_min_bitrate) {
- return new BitrateControllerImpl(enforce_min_bitrate);
+ return new BitrateControllerImpl(clock, enforce_min_bitrate);
}
-BitrateControllerImpl::BitrateControllerImpl(bool enforce_min_bitrate)
- : critsect_(CriticalSectionWrapper::CreateCriticalSection()) {
- if (enforce_min_bitrate) {
- low_rate_strategy_.reset(new EnforceMinRateStrategy(
- &bandwidth_estimation_, &bitrate_observers_));
- } else {
- low_rate_strategy_.reset(new NoEnforceMinRateStrategy(
- &bandwidth_estimation_, &bitrate_observers_));
- }
-}
+BitrateControllerImpl::BitrateControllerImpl(Clock* clock, bool enforce_min_bitrate)
+ : clock_(clock),
+ last_bitrate_update_ms_(clock_->TimeInMilliseconds()),
+ critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+ bandwidth_estimation_(),
+ bitrate_observers_(),
+ enforce_min_bitrate_(enforce_min_bitrate),
+ reserved_bitrate_bps_(0),
+ last_bitrate_bps_(0),
+ last_fraction_loss_(0),
+ last_rtt_ms_(0),
+ last_enforce_min_bitrate_(!enforce_min_bitrate_),
+ bitrate_observers_modified_(false),
+ last_reserved_bitrate_bps_(0) {}
BitrateControllerImpl::~BitrateControllerImpl() {
- BitrateObserverConfList::iterator it =
- bitrate_observers_.begin();
+ BitrateObserverConfList::iterator it = bitrate_observers_.begin();
while (it != bitrate_observers_.end()) {
delete it->second;
bitrate_observers_.erase(it);
@@ -203,26 +139,56 @@ void BitrateControllerImpl::SetBitrateObserver(
it->second->start_bitrate_ = start_bitrate;
it->second->min_bitrate_ = min_bitrate;
it->second->max_bitrate_ = max_bitrate;
+ // Set the send-side bandwidth to the max of the sum of start bitrates and
+ // the current estimate, so that if the user wants to immediately use more
+ // bandwidth, that can be enforced.
+ uint32_t sum_start_bitrate = 0;
+ BitrateObserverConfList::iterator it;
+ for (it = bitrate_observers_.begin(); it != bitrate_observers_.end();
+ ++it) {
+ sum_start_bitrate += it->second->start_bitrate_;
+ }
+ uint32_t current_estimate;
+ uint8_t loss;
+ uint32_t rtt;
+ bandwidth_estimation_.CurrentEstimate(&current_estimate, &loss, &rtt);
+ bandwidth_estimation_.SetSendBitrate(std::max(sum_start_bitrate,
+ current_estimate));
} else {
// Add new settings.
bitrate_observers_.push_back(BitrateObserverConfiguration(observer,
new BitrateConfiguration(start_bitrate, min_bitrate, max_bitrate)));
+ bitrate_observers_modified_ = true;
+
+    // TODO(andresp): This is an ugly way to set start bitrate.
+ //
+ // Only change start bitrate if we have exactly one observer. By definition
+ // you can only have one start bitrate, once we have our first estimate we
+ // will adapt from there.
+ if (bitrate_observers_.size() == 1) {
+ bandwidth_estimation_.SetSendBitrate(start_bitrate);
+ }
}
- uint32_t sum_start_bitrate = 0;
+
+ UpdateMinMaxBitrate();
+}
+
+void BitrateControllerImpl::UpdateMinMaxBitrate() {
uint32_t sum_min_bitrate = 0;
uint32_t sum_max_bitrate = 0;
-
- // Summarize all configurations.
+ BitrateObserverConfList::iterator it;
for (it = bitrate_observers_.begin(); it != bitrate_observers_.end(); ++it) {
- sum_start_bitrate += it->second->start_bitrate_;
sum_min_bitrate += it->second->min_bitrate_;
sum_max_bitrate += it->second->max_bitrate_;
}
- // Only change start bitrate if we have exactly one observer. By definition
- // you can only have one start bitrate, once we have our first estimate we
- // will adapt from there.
- if (bitrate_observers_.size() == 1) {
- bandwidth_estimation_.SetSendBitrate(sum_start_bitrate);
+ if (sum_max_bitrate == 0) {
+    // No max configured; use 1 Gbit/s.
+ sum_max_bitrate = 1000000000;
+ }
+ if (enforce_min_bitrate_ == false) {
+ // If not enforcing min bitrate, allow the bandwidth estimation to
+ // go as low as 10 kbps.
+ sum_min_bitrate = std::min(sum_min_bitrate, 10000u);
}
bandwidth_estimation_.SetMinMaxBitrate(sum_min_bitrate,
sum_max_bitrate);
@@ -235,31 +201,46 @@ void BitrateControllerImpl::RemoveBitrateObserver(BitrateObserver* observer) {
if (it != bitrate_observers_.end()) {
delete it->second;
bitrate_observers_.erase(it);
+ bitrate_observers_modified_ = true;
}
}
void BitrateControllerImpl::EnforceMinBitrate(bool enforce_min_bitrate) {
CriticalSectionScoped cs(critsect_);
- if (enforce_min_bitrate) {
- low_rate_strategy_.reset(new EnforceMinRateStrategy(
- &bandwidth_estimation_, &bitrate_observers_));
- } else {
- low_rate_strategy_.reset(new NoEnforceMinRateStrategy(
- &bandwidth_estimation_, &bitrate_observers_));
- }
+ enforce_min_bitrate_ = enforce_min_bitrate;
+ UpdateMinMaxBitrate();
+}
+
+void BitrateControllerImpl::SetReservedBitrate(uint32_t reserved_bitrate_bps) {
+ CriticalSectionScoped cs(critsect_);
+ reserved_bitrate_bps_ = reserved_bitrate_bps;
+ MaybeTriggerOnNetworkChanged();
}
void BitrateControllerImpl::OnReceivedEstimatedBitrate(const uint32_t bitrate) {
- uint32_t new_bitrate = 0;
- uint8_t fraction_lost = 0;
- uint16_t rtt = 0;
CriticalSectionScoped cs(critsect_);
- if (bandwidth_estimation_.UpdateBandwidthEstimate(bitrate,
- &new_bitrate,
- &fraction_lost,
- &rtt)) {
- OnNetworkChanged(new_bitrate, fraction_lost, rtt);
+ bandwidth_estimation_.UpdateReceiverEstimate(bitrate);
+ MaybeTriggerOnNetworkChanged();
+}
+
+int32_t BitrateControllerImpl::TimeUntilNextProcess() {
+ enum { kBitrateControllerUpdateIntervalMs = 25 };
+ CriticalSectionScoped cs(critsect_);
+ int time_since_update_ms =
+ clock_->TimeInMilliseconds() - last_bitrate_update_ms_;
+ return std::max(0, kBitrateControllerUpdateIntervalMs - time_since_update_ms);
+}
+
+int32_t BitrateControllerImpl::Process() {
+ if (TimeUntilNextProcess() > 0)
+ return 0;
+ {
+ CriticalSectionScoped cs(critsect_);
+ bandwidth_estimation_.UpdateEstimate(clock_->TimeInMilliseconds());
+ MaybeTriggerOnNetworkChanged();
}
+ last_bitrate_update_ms_ = clock_->TimeInMilliseconds();
+ return 0;
}
void BitrateControllerImpl::OnReceivedRtcpReceiverReport(
@@ -267,37 +248,63 @@ void BitrateControllerImpl::OnReceivedRtcpReceiverReport(
const uint32_t rtt,
const int number_of_packets,
const uint32_t now_ms) {
- uint32_t new_bitrate = 0;
- uint8_t loss = fraction_loss;
CriticalSectionScoped cs(critsect_);
- if (bandwidth_estimation_.UpdatePacketLoss(number_of_packets, rtt, now_ms,
- &loss, &new_bitrate)) {
- OnNetworkChanged(new_bitrate, loss, rtt);
+ bandwidth_estimation_.UpdateReceiverBlock(
+ fraction_loss, rtt, number_of_packets, now_ms);
+ MaybeTriggerOnNetworkChanged();
+}
+
+void BitrateControllerImpl::MaybeTriggerOnNetworkChanged() {
+ uint32_t bitrate;
+ uint8_t fraction_loss;
+ uint32_t rtt;
+ bandwidth_estimation_.CurrentEstimate(&bitrate, &fraction_loss, &rtt);
+ bitrate -= std::min(bitrate, reserved_bitrate_bps_);
+
+ if (bitrate_observers_modified_ ||
+ bitrate != last_bitrate_bps_ ||
+ fraction_loss != last_fraction_loss_ ||
+ rtt != last_rtt_ms_ ||
+ last_enforce_min_bitrate_ != enforce_min_bitrate_ ||
+ last_reserved_bitrate_bps_ != reserved_bitrate_bps_) {
+ last_bitrate_bps_ = bitrate;
+ last_fraction_loss_ = fraction_loss;
+ last_rtt_ms_ = rtt;
+ last_enforce_min_bitrate_ = enforce_min_bitrate_;
+ last_reserved_bitrate_bps_ = reserved_bitrate_bps_;
+ bitrate_observers_modified_ = false;
+ OnNetworkChanged(bitrate, fraction_loss, rtt);
}
}
-// We have the lock here.
void BitrateControllerImpl::OnNetworkChanged(const uint32_t bitrate,
const uint8_t fraction_loss,
const uint32_t rtt) {
// Sanity check.
- uint32_t number_of_observers = bitrate_observers_.size();
- if (number_of_observers == 0) {
+ if (bitrate_observers_.empty())
return;
- }
+
uint32_t sum_min_bitrates = 0;
BitrateObserverConfList::iterator it;
for (it = bitrate_observers_.begin(); it != bitrate_observers_.end(); ++it) {
sum_min_bitrates += it->second->min_bitrate_;
}
- if (bitrate <= sum_min_bitrates) {
- return low_rate_strategy_->LowRateAllocation(bitrate, fraction_loss, rtt,
- sum_min_bitrates);
- }
+ if (bitrate <= sum_min_bitrates)
+ return LowRateAllocation(bitrate, fraction_loss, rtt, sum_min_bitrates);
+ else
+ return NormalRateAllocation(bitrate, fraction_loss, rtt, sum_min_bitrates);
+}
+
+void BitrateControllerImpl::NormalRateAllocation(uint32_t bitrate,
+ uint8_t fraction_loss,
+ uint32_t rtt,
+ uint32_t sum_min_bitrates) {
+ uint32_t number_of_observers = bitrate_observers_.size();
uint32_t bitrate_per_observer = (bitrate - sum_min_bitrates) /
number_of_observers;
// Use map to sort list based on max bitrate.
ObserverSortingMap list_max_bitrates;
+ BitrateObserverConfList::iterator it;
for (it = bitrate_observers_.begin(); it != bitrate_observers_.end(); ++it) {
list_max_bitrates.insert(std::pair<uint32_t, ObserverConfiguration*>(
it->second->max_bitrate_,
@@ -328,8 +335,46 @@ void BitrateControllerImpl::OnNetworkChanged(const uint32_t bitrate,
}
}
+void BitrateControllerImpl::LowRateAllocation(uint32_t bitrate,
+ uint8_t fraction_loss,
+ uint32_t rtt,
+ uint32_t sum_min_bitrates) {
+ if (enforce_min_bitrate_) {
+ // Min bitrate to all observers.
+ BitrateControllerImpl::BitrateObserverConfList::iterator it;
+ for (it = bitrate_observers_.begin(); it != bitrate_observers_.end();
+ ++it) {
+ it->first->OnNetworkChanged(it->second->min_bitrate_, fraction_loss, rtt);
+ }
+ // Set sum of min to current send bitrate.
+ bandwidth_estimation_.SetSendBitrate(sum_min_bitrates);
+ } else {
+ // Allocate up to |min_bitrate_| to one observer at a time, until
+ // |bitrate| is depleted.
+ uint32_t remainder = bitrate;
+ BitrateControllerImpl::BitrateObserverConfList::iterator it;
+ for (it = bitrate_observers_.begin(); it != bitrate_observers_.end();
+ ++it) {
+ uint32_t allocation = std::min(remainder, it->second->min_bitrate_);
+ it->first->OnNetworkChanged(allocation, fraction_loss, rtt);
+ remainder -= allocation;
+ }
+ // Set |bitrate| to current send bitrate.
+ bandwidth_estimation_.SetSendBitrate(bitrate);
+ }
+}
+
bool BitrateControllerImpl::AvailableBandwidth(uint32_t* bandwidth) const {
- return bandwidth_estimation_.AvailableBandwidth(bandwidth);
+ CriticalSectionScoped cs(critsect_);
+ uint32_t bitrate;
+ uint8_t fraction_loss;
+ uint32_t rtt;
+ bandwidth_estimation_.CurrentEstimate(&bitrate, &fraction_loss, &rtt);
+ if (bitrate) {
+ *bandwidth = bitrate - std::min(bitrate, reserved_bitrate_bps_);
+ return true;
+ }
+ return false;
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
index 62ed6fda5ab..aff127bd142 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_impl.h
@@ -27,12 +27,30 @@
namespace webrtc {
-class RtcpBandwidthObserverImpl;
-class LowRateStrategy;
-
class BitrateControllerImpl : public BitrateController {
public:
- friend class RtcpBandwidthObserverImpl;
+ BitrateControllerImpl(Clock* clock, bool enforce_min_bitrate);
+ virtual ~BitrateControllerImpl();
+
+ virtual bool AvailableBandwidth(uint32_t* bandwidth) const OVERRIDE;
+
+ virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() OVERRIDE;
+
+ virtual void SetBitrateObserver(BitrateObserver* observer,
+ const uint32_t start_bitrate,
+ const uint32_t min_bitrate,
+ const uint32_t max_bitrate) OVERRIDE;
+
+ virtual void RemoveBitrateObserver(BitrateObserver* observer) OVERRIDE;
+
+ virtual void EnforceMinBitrate(bool enforce_min_bitrate) OVERRIDE;
+ virtual void SetReservedBitrate(uint32_t reserved_bitrate_bps) OVERRIDE;
+
+ virtual int32_t TimeUntilNextProcess() OVERRIDE;
+ virtual int32_t Process() OVERRIDE;
+
+ private:
+ class RtcpBandwidthObserverImpl;
struct BitrateConfiguration {
BitrateConfiguration(uint32_t start_bitrate,
@@ -59,23 +77,8 @@ class BitrateControllerImpl : public BitrateController {
BitrateObserverConfiguration;
typedef std::list<BitrateObserverConfiguration> BitrateObserverConfList;
- explicit BitrateControllerImpl(bool enforce_min_bitrate);
- virtual ~BitrateControllerImpl();
-
- virtual bool AvailableBandwidth(uint32_t* bandwidth) const OVERRIDE;
-
- virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() OVERRIDE;
-
- virtual void SetBitrateObserver(BitrateObserver* observer,
- const uint32_t start_bitrate,
- const uint32_t min_bitrate,
- const uint32_t max_bitrate) OVERRIDE;
-
- virtual void RemoveBitrateObserver(BitrateObserver* observer) OVERRIDE;
+ void UpdateMinMaxBitrate() EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
- virtual void EnforceMinBitrate(bool enforce_min_bitrate) OVERRIDE;
-
- private:
// Called by BitrateObserver's direct from the RTCP module.
void OnReceivedEstimatedBitrate(const uint32_t bitrate);
@@ -84,18 +87,48 @@ class BitrateControllerImpl : public BitrateController {
const int number_of_packets,
const uint32_t now_ms);
- typedef std::multimap<uint32_t, ObserverConfiguration*> ObserverSortingMap;
+ void MaybeTriggerOnNetworkChanged() EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
- BitrateObserverConfList::iterator
- FindObserverConfigurationPair(const BitrateObserver* observer);
void OnNetworkChanged(const uint32_t bitrate,
const uint8_t fraction_loss, // 0 - 255.
- const uint32_t rtt);
+ const uint32_t rtt)
+ EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+
+ void NormalRateAllocation(uint32_t bitrate,
+ uint8_t fraction_loss,
+ uint32_t rtt,
+ uint32_t sum_min_bitrates)
+ EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+
+ void LowRateAllocation(uint32_t bitrate,
+ uint8_t fraction_loss,
+ uint32_t rtt,
+ uint32_t sum_min_bitrates)
+ EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+
+ typedef std::multimap<uint32_t, ObserverConfiguration*> ObserverSortingMap;
+
+ BitrateObserverConfList::iterator FindObserverConfigurationPair(
+ const BitrateObserver* observer) EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+
+ // Used by process thread.
+ Clock* clock_;
+ uint32_t last_bitrate_update_ms_;
CriticalSectionWrapper* critsect_;
- SendSideBandwidthEstimation bandwidth_estimation_;
- BitrateObserverConfList bitrate_observers_;
- scoped_ptr<LowRateStrategy> low_rate_strategy_;
+ SendSideBandwidthEstimation bandwidth_estimation_ GUARDED_BY(*critsect_);
+ BitrateObserverConfList bitrate_observers_ GUARDED_BY(*critsect_);
+ bool enforce_min_bitrate_ GUARDED_BY(*critsect_);
+ uint32_t reserved_bitrate_bps_ GUARDED_BY(*critsect_);
+
+ uint32_t last_bitrate_bps_ GUARDED_BY(*critsect_);
+ uint8_t last_fraction_loss_ GUARDED_BY(*critsect_);
+ uint32_t last_rtt_ms_ GUARDED_BY(*critsect_);
+ bool last_enforce_min_bitrate_ GUARDED_BY(*critsect_);
+ bool bitrate_observers_modified_ GUARDED_BY(*critsect_);
+ uint32_t last_reserved_bitrate_bps_ GUARDED_BY(*critsect_);
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(BitrateControllerImpl);
};
} // namespace webrtc
#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_BITRATE_CONTROLLER_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
index 30f85a81cb9..8523d505b24 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/bitrate_controller_unittest.cc
@@ -57,12 +57,12 @@ class TestBitrateObserver: public BitrateObserver {
class BitrateControllerTest : public ::testing::Test {
protected:
- BitrateControllerTest() : enforce_min_bitrate_(true) {}
+ BitrateControllerTest() : clock_(0), enforce_min_bitrate_(true) {}
~BitrateControllerTest() {}
virtual void SetUp() {
- controller_ =
- BitrateController::CreateBitrateController(enforce_min_bitrate_);
+ controller_ = BitrateController::CreateBitrateController(
+ &clock_, enforce_min_bitrate_);
bandwidth_observer_ = controller_->CreateRtcpBandwidthObserver();
}
@@ -70,6 +70,8 @@ class BitrateControllerTest : public ::testing::Test {
delete bandwidth_observer_;
delete controller_;
}
+
+ webrtc::SimulatedClock clock_;
bool enforce_min_bitrate_;
BitrateController* controller_;
RtcpBandwidthObserver* bandwidth_observer_;
@@ -81,58 +83,74 @@ TEST_F(BitrateControllerTest, Basic) {
controller_->RemoveBitrateObserver(&bitrate_observer);
}
+TEST_F(BitrateControllerTest, UpdatingBitrateObserver) {
+ TestBitrateObserver bitrate_observer;
+ controller_->SetBitrateObserver(&bitrate_observer, 200000, 100000, 1500000);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetBitrateObserver(&bitrate_observer, 1500000, 100000, 1500000);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+ EXPECT_EQ(1500000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetBitrateObserver(&bitrate_observer, 500000, 100000, 1500000);
+ clock_.AdvanceTimeMilliseconds(25);
+ controller_->Process();
+ EXPECT_EQ(1500000u, bitrate_observer.last_bitrate_);
+}
+
TEST_F(BitrateControllerTest, OneBitrateObserverOneRtcpObserver) {
TestBitrateObserver bitrate_observer;
controller_->SetBitrateObserver(&bitrate_observer, 200000, 100000, 300000);
// Receive a high remb, test bitrate inc.
bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
+ EXPECT_EQ(0u, bitrate_observer.last_rtt_);
- // Test start bitrate.
+ // Test bitrate increase 8% per second.
webrtc::ReportBlockList report_blocks;
report_blocks.push_back(CreateReportBlock(1, 2, 0, 1));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 1);
- EXPECT_EQ(0u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
- EXPECT_EQ(0u, bitrate_observer.last_rtt_);
+ EXPECT_EQ(50u, bitrate_observer.last_rtt_);
- // Test bitrate increase 8% per second.
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 21));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 1001);
- EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
EXPECT_EQ(50u, bitrate_observer.last_rtt_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 41));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 2001);
- EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 61));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 3001);
- EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(276604u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 801));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 4001);
- EXPECT_EQ(276604u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(299732u, bitrate_observer.last_bitrate_);
+ // Reach max cap.
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 101));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 5001);
- EXPECT_EQ(299732u, bitrate_observer.last_bitrate_);
-
- report_blocks.clear();
- report_blocks.push_back(CreateReportBlock(1, 2, 0, 121));
- bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 6001);
- EXPECT_EQ(300000u, bitrate_observer.last_bitrate_); // Max cap.
+ EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 141));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 7001);
- EXPECT_EQ(300000u, bitrate_observer.last_bitrate_); // Max cap.
+ EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);
// Test that a low REMB trigger immediately.
bandwidth_observer_->OnReceivedEstimatedBitrate(250000);
@@ -154,6 +172,9 @@ TEST_F(BitrateControllerTest, OneBitrateObserverTwoRtcpObservers) {
// Receive a high remb, test bitrate inc.
bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
+ EXPECT_EQ(0u, bitrate_observer.last_rtt_);
// Test start bitrate.
webrtc::ReportBlockList report_blocks;
@@ -161,9 +182,9 @@ TEST_F(BitrateControllerTest, OneBitrateObserverTwoRtcpObservers) {
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 1);
second_bandwidth_observer->OnReceivedRtcpReceiverReport(
report_blocks, 100, 1);
- EXPECT_EQ(0u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
- EXPECT_EQ(0u, bitrate_observer.last_rtt_);
+ EXPECT_EQ(100u, bitrate_observer.last_rtt_);
// Test bitrate increase 8% per second.
report_blocks.clear();
@@ -171,7 +192,7 @@ TEST_F(BitrateControllerTest, OneBitrateObserverTwoRtcpObservers) {
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 501);
second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 100,
1001);
- EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
EXPECT_EQ(100u, bitrate_observer.last_rtt_);
@@ -180,50 +201,45 @@ TEST_F(BitrateControllerTest, OneBitrateObserverTwoRtcpObservers) {
report_blocks.push_back(CreateReportBlock(1, 2, 0, 31));
second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 100,
1501);
- EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 41));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 2001);
- EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
// Second report should not change estimate.
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 41));
second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 100,
2001);
- EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
// Reports from only one bandwidth observer is ok.
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 61));
second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 50,
3001);
- EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
+ EXPECT_EQ(276604u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 81));
second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 50,
4001);
- EXPECT_EQ(276604u, bitrate_observer.last_bitrate_);
-
- report_blocks.clear();
- report_blocks.push_back(CreateReportBlock(1, 2, 0, 101));
- second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 50,
- 5001);
EXPECT_EQ(299732u, bitrate_observer.last_bitrate_);
+ // Reach max cap.
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 121));
- second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 50,
- 6001);
- EXPECT_EQ(300000u, bitrate_observer.last_bitrate_); // Max cap.
+ second_bandwidth_observer->OnReceivedRtcpReceiverReport(
+ report_blocks, 50, 5001);
+ EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 141));
- second_bandwidth_observer->OnReceivedRtcpReceiverReport(report_blocks, 50,
- 7001);
- EXPECT_EQ(300000u, bitrate_observer.last_bitrate_); // Max cap.
+ second_bandwidth_observer->OnReceivedRtcpReceiverReport(
+ report_blocks, 50, 6001);
+ EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);
// Test that a low REMB trigger immediately.
// We don't care which bandwidth observer that delivers the REMB.
@@ -232,8 +248,9 @@ TEST_F(BitrateControllerTest, OneBitrateObserverTwoRtcpObservers) {
EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
EXPECT_EQ(50u, bitrate_observer.last_rtt_);
+ // Min cap.
bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
- EXPECT_EQ(100000u, bitrate_observer.last_bitrate_); // Min cap.
+ EXPECT_EQ(100000u, bitrate_observer.last_bitrate_);
controller_->RemoveBitrateObserver(&bitrate_observer);
delete second_bandwidth_observer;
}
@@ -317,40 +334,33 @@ TEST_F(BitrateControllerTest, TwoBitrateObserversOneRtcpObserver) {
controller_->SetBitrateObserver(&bitrate_observer_1, 200000, 100000, 300000);
// Receive a high remb, test bitrate inc.
+ // Test too low start bitrate, hence lower than sum of min.
bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+ EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(0, bitrate_observer_1.last_fraction_loss_);
+ EXPECT_EQ(0u, bitrate_observer_1.last_rtt_);
- // Test too low start bitrate, hence lower than sum of min.
+ // Test bitrate increase 8% per second, distributed equally.
webrtc::ReportBlockList report_blocks;
report_blocks.push_back(CreateReportBlock(1, 2, 0, 1));
- bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 1);
-
- // Test bitrate increase 8% per second, distributed equally.
- report_blocks.clear();
- report_blocks.push_back(CreateReportBlock(1, 2, 0, 21));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 1001);
- EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(112500u, bitrate_observer_1.last_bitrate_);
EXPECT_EQ(0, bitrate_observer_1.last_fraction_loss_);
EXPECT_EQ(50u, bitrate_observer_1.last_rtt_);
- EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_);
+ EXPECT_EQ(212500u, bitrate_observer_2.last_bitrate_);
EXPECT_EQ(0, bitrate_observer_2.last_fraction_loss_);
EXPECT_EQ(50u, bitrate_observer_2.last_rtt_);
report_blocks.clear();
report_blocks.push_back(CreateReportBlock(1, 2, 0, 41));
bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 2001);
- EXPECT_EQ(112500u, bitrate_observer_1.last_bitrate_);
- EXPECT_EQ(212500u, bitrate_observer_2.last_bitrate_);
-
- report_blocks.clear();
- report_blocks.push_back(CreateReportBlock(1, 2, 0, 61));
- bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 3001);
EXPECT_EQ(126000u, bitrate_observer_1.last_bitrate_);
EXPECT_EQ(226000u, bitrate_observer_2.last_bitrate_);
report_blocks.clear();
- report_blocks.push_back(CreateReportBlock(1, 2, 0, 81));
- bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 4001);
+ report_blocks.push_back(CreateReportBlock(1, 2, 0, 61));
+ bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, 50, 3001);
EXPECT_EQ(140580u, bitrate_observer_1.last_bitrate_);
EXPECT_EQ(240580u, bitrate_observer_2.last_bitrate_);
@@ -416,6 +426,61 @@ TEST_F(BitrateControllerTest, TwoBitrateObserversOneRtcpObserver) {
controller_->RemoveBitrateObserver(&bitrate_observer_2);
}
+TEST_F(BitrateControllerTest, SetReservedBitrate) {
+ TestBitrateObserver bitrate_observer;
+ controller_->SetBitrateObserver(&bitrate_observer, 200000, 100000, 300000);
+
+ // Receive successively lower REMBs, verify the reserved bitrate is deducted.
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(50000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+ EXPECT_EQ(150000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(250000);
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(50000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(250000);
+ EXPECT_EQ(150000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(200000);
+ EXPECT_EQ(200000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(30000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(200000);
+ EXPECT_EQ(170000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(160000);
+ EXPECT_EQ(160000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(30000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(160000);
+ EXPECT_EQ(130000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(120000);
+ EXPECT_EQ(120000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(10000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(120000);
+ EXPECT_EQ(110000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(0);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(120000);
+ EXPECT_EQ(120000u, bitrate_observer.last_bitrate_);
+ controller_->SetReservedBitrate(50000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(120000);
+ EXPECT_EQ(100000u, bitrate_observer.last_bitrate_);
+
+ controller_->SetReservedBitrate(10000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(0);
+ EXPECT_EQ(100000u, bitrate_observer.last_bitrate_);
+
+ controller_->RemoveBitrateObserver(&bitrate_observer);
+}
+
class BitrateControllerTestNoEnforceMin : public BitrateControllerTest {
protected:
BitrateControllerTestNoEnforceMin() : BitrateControllerTest() {
@@ -434,8 +499,32 @@ TEST_F(BitrateControllerTestNoEnforceMin, OneBitrateObserver) {
EXPECT_EQ(150000u, bitrate_observer_1.last_bitrate_);
// Low REMB.
- bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
- EXPECT_EQ(1000u, bitrate_observer_1.last_bitrate_);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(10000);
+ EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
+
+ // Keeps at least 10 kbps.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(9000);
+ EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
+
+ controller_->RemoveBitrateObserver(&bitrate_observer_1);
+}
+
+TEST_F(BitrateControllerTestNoEnforceMin, SetReservedBitrate) {
+ TestBitrateObserver bitrate_observer_1;
+ controller_->SetBitrateObserver(&bitrate_observer_1, 200000, 100000, 400000);
+ controller_->SetReservedBitrate(10000);
+
+ // High REMB.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(150000);
+ EXPECT_EQ(140000u, bitrate_observer_1.last_bitrate_);
+
+ // Low REMB.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(15000);
+ EXPECT_EQ(5000u, bitrate_observer_1.last_bitrate_);
+
+ // Keeps at least 10 kbps.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(9000);
+ EXPECT_EQ(0u, bitrate_observer_1.last_bitrate_);
controller_->RemoveBitrateObserver(&bitrate_observer_1);
}
@@ -469,9 +558,15 @@ TEST_F(BitrateControllerTestNoEnforceMin, ThreeBitrateObservers) {
EXPECT_EQ(200000u, bitrate_observer_3.last_bitrate_); // Remainder.
// Low REMB.
- bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
+ bandwidth_observer_->OnReceivedEstimatedBitrate(10000);
// Verify that the first observer gets all the rate, and the rest get zero.
- EXPECT_EQ(1000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
+ EXPECT_EQ(0u, bitrate_observer_2.last_bitrate_);
+ EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_);
+
+ // Verify it keeps an estimate of at least 10kbps.
+ bandwidth_observer_->OnReceivedEstimatedBitrate(9000);
+ EXPECT_EQ(10000u, bitrate_observer_1.last_bitrate_);
EXPECT_EQ(0u, bitrate_observer_2.last_bitrate_);
EXPECT_EQ(0u, bitrate_observer_3.last_bitrate_);
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h b/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
index 0f743676581..46d7830300b 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/include/bitrate_controller.h
@@ -15,6 +15,7 @@
#ifndef WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
#define WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
+#include "webrtc/modules/interface/module.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
namespace webrtc {
@@ -35,7 +36,7 @@ class BitrateObserver {
virtual ~BitrateObserver() {}
};
-class BitrateController {
+class BitrateController : public Module {
/*
* This class collects feedback from all streams sent to a peer (via
* RTCPBandwidthObservers). It does one aggregated send side bandwidth
@@ -48,7 +49,8 @@ class BitrateController {
// When true, the bitrate will never be set lower than the minimum bitrate(s).
// When false, the bitrate observers will be allocated rates up to their
// respective minimum bitrate, satisfying one observer after the other.
- static BitrateController* CreateBitrateController(bool enforce_min_bitrate);
+ static BitrateController* CreateBitrateController(Clock* clock,
+ bool enforce_min_bitrate);
virtual ~BitrateController() {}
virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() = 0;
@@ -73,6 +75,8 @@ class BitrateController {
// Changes the mode that was set in the constructor.
virtual void EnforceMinBitrate(bool enforce_min_bitrate) = 0;
+
+ virtual void SetReservedBitrate(uint32_t reserved_bitrate_bps) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
index ce385db54c1..5da23f06624 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.cc
@@ -10,216 +10,204 @@
#include "webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h"
-#include <math.h> // sqrt()
+#include <cmath>
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
+namespace {
+enum { kBweIncreaseIntervalMs = 1000 };
+enum { kBweDecreaseIntervalMs = 300 };
+enum { kLimitNumPackets = 20 };
+enum { kAvgPacketSizeBytes = 1000 };
+
+// Calculate the rate that TCP-Friendly Rate Control (TFRC) would apply.
+// The formula in RFC 3448, Section 3.1, is used.
+uint32_t CalcTfrcBps(uint16_t rtt, uint8_t loss) {
+ if (rtt == 0 || loss == 0) {
+ // Input variables out of range.
+ return 0;
+ }
+ double R = static_cast<double>(rtt) / 1000; // RTT in seconds.
+ int b = 1; // Number of packets acknowledged by a single TCP acknowledgement:
+ // recommended = 1.
+ double t_RTO = 4.0 * R; // TCP retransmission timeout value in seconds
+ // recommended = 4*R.
+ double p = static_cast<double>(loss) / 255; // Packet loss rate in [0, 1).
+ double s = static_cast<double>(kAvgPacketSizeBytes);
+
+ // Calculate send rate in bytes/second.
+ double X =
+ s / (R * std::sqrt(2 * b * p / 3) +
+ (t_RTO * (3 * std::sqrt(3 * b * p / 8) * p * (1 + 32 * p * p))));
+
+ // Convert to bits/second.
+ return (static_cast<uint32_t>(X * 8));
+}
+}
SendSideBandwidthEstimation::SendSideBandwidthEstimation()
- : critsect_(CriticalSectionWrapper::CreateCriticalSection()),
- accumulate_lost_packets_Q8_(0),
+ : accumulate_lost_packets_Q8_(0),
accumulate_expected_packets_(0),
bitrate_(0),
min_bitrate_configured_(0),
max_bitrate_configured_(0),
+ time_last_receiver_block_ms_(0),
last_fraction_loss_(0),
- last_round_trip_time_(0),
+ last_round_trip_time_ms_(0),
bwe_incoming_(0),
- time_last_increase_(0),
- time_last_decrease_(0) {
-}
+ time_last_decrease_ms_(0) {}
-SendSideBandwidthEstimation::~SendSideBandwidthEstimation() {
- delete critsect_;
-}
+SendSideBandwidthEstimation::~SendSideBandwidthEstimation() {}
-void SendSideBandwidthEstimation::SetSendBitrate(const uint32_t bitrate) {
- CriticalSectionScoped cs(critsect_);
+void SendSideBandwidthEstimation::SetSendBitrate(uint32_t bitrate) {
bitrate_ = bitrate;
+
+ // Clear last sent bitrate history so the new value can be used directly
+ // and not capped.
+ min_bitrate_history_.clear();
}
-void SendSideBandwidthEstimation::SetMinMaxBitrate(const uint32_t min_bitrate,
- const uint32_t max_bitrate) {
- CriticalSectionScoped cs(critsect_);
+void SendSideBandwidthEstimation::SetMinMaxBitrate(uint32_t min_bitrate,
+ uint32_t max_bitrate) {
min_bitrate_configured_ = min_bitrate;
- if (max_bitrate == 0) {
- // no max configured use 1Gbit/s
- max_bitrate_configured_ = 1000000000;
- } else {
- max_bitrate_configured_ = max_bitrate;
- }
+ max_bitrate_configured_ = max_bitrate;
}
-bool SendSideBandwidthEstimation::UpdateBandwidthEstimate(
- const uint32_t bandwidth,
- uint32_t* new_bitrate,
- uint8_t* fraction_lost,
- uint16_t* rtt) {
- *new_bitrate = 0;
- CriticalSectionScoped cs(critsect_);
+void SendSideBandwidthEstimation::SetMinBitrate(uint32_t min_bitrate) {
+ min_bitrate_configured_ = min_bitrate;
+}
- bwe_incoming_ = bandwidth;
+void SendSideBandwidthEstimation::CurrentEstimate(uint32_t* bitrate,
+ uint8_t* loss,
+ uint32_t* rtt) const {
+ *bitrate = bitrate_;
+ *loss = last_fraction_loss_;
+ *rtt = last_round_trip_time_ms_;
+}
- if (bitrate_ == 0) {
- // SendSideBandwidthEstimation off
- return false;
- }
- if (bwe_incoming_ > 0 && bitrate_ > bwe_incoming_) {
- bitrate_ = bwe_incoming_;
- *new_bitrate = bitrate_;
- *fraction_lost = last_fraction_loss_;
- *rtt = last_round_trip_time_;
- return true;
- }
- return false;
+void SendSideBandwidthEstimation::UpdateReceiverEstimate(uint32_t bandwidth) {
+ bwe_incoming_ = bandwidth;
+ CapBitrateToThresholds();
}
-bool SendSideBandwidthEstimation::UpdatePacketLoss(
- const int number_of_packets,
- const uint32_t rtt,
- const uint32_t now_ms,
- uint8_t* loss,
- uint32_t* new_bitrate) {
- CriticalSectionScoped cs(critsect_);
-
- if (bitrate_ == 0) {
- // SendSideBandwidthEstimation off
- return false;
- }
+void SendSideBandwidthEstimation::UpdateReceiverBlock(uint8_t fraction_loss,
+ uint32_t rtt,
+ int number_of_packets,
+ uint32_t now_ms) {
// Update RTT.
- last_round_trip_time_ = rtt;
+ last_round_trip_time_ms_ = rtt;
// Check sequence number diff and weight loss report
if (number_of_packets > 0) {
// Calculate number of lost packets.
- const int num_lost_packets_Q8 = *loss * number_of_packets;
+ const int num_lost_packets_Q8 = fraction_loss * number_of_packets;
// Accumulate reports.
accumulate_lost_packets_Q8_ += num_lost_packets_Q8;
accumulate_expected_packets_ += number_of_packets;
// Report loss if the total report is based on sufficiently many packets.
if (accumulate_expected_packets_ >= kLimitNumPackets) {
- *loss = accumulate_lost_packets_Q8_ / accumulate_expected_packets_;
+ last_fraction_loss_ =
+ accumulate_lost_packets_Q8_ / accumulate_expected_packets_;
- // Reset accumulators
+ // Reset accumulators.
accumulate_lost_packets_Q8_ = 0;
accumulate_expected_packets_ = 0;
} else {
- // Report zero loss until we have enough data to estimate
- // the loss rate.
- return false;
+ // Early return without updating estimate.
+ return;
}
}
- // Keep for next time.
- last_fraction_loss_ = *loss;
- uint32_t bitrate = 0;
- if (!ShapeSimple(*loss, rtt, now_ms, &bitrate)) {
- // No change.
- return false;
- }
- bitrate_ = bitrate;
- *new_bitrate = bitrate;
- return true;
+ time_last_receiver_block_ms_ = now_ms;
+ UpdateEstimate(now_ms);
}
-bool SendSideBandwidthEstimation::AvailableBandwidth(
- uint32_t* bandwidth) const {
- CriticalSectionScoped cs(critsect_);
- if (bitrate_ == 0) {
- return false;
+void SendSideBandwidthEstimation::UpdateEstimate(uint32_t now_ms) {
+ UpdateMinHistory(now_ms);
+
+ // Only start updating bitrate when receiving receiver blocks.
+ if (time_last_receiver_block_ms_ != 0) {
+ if (last_fraction_loss_ <= 5) {
+ // Loss < 2%: Increase rate by 8% of the min bitrate in the last
+ // kBweIncreaseIntervalMs.
+ // Note that by remembering the bitrate over the last second one can
+ // ramp up one second faster than if only allowed to start ramping
+ // at 8% per second rate now. E.g.:
+ // If sending a constant 100kbps it can ramp up immediately to 108kbps
+ // whenever a receiver report is received with lower packet loss.
+ // If instead one would do: bitrate_ *= 1.08^(delta time), it would
+ // take over one second since the lower packet loss to achieve 108kbps.
+ bitrate_ = static_cast<uint32_t>(
+ min_bitrate_history_.front().second * 1.08 + 0.5);
+
+ // Add 1 kbps extra, just to make sure that we do not get stuck
+ // (gives a little extra increase at low rates, negligible at higher
+ // rates).
+ bitrate_ += 1000;
+
+ } else if (last_fraction_loss_ <= 26) {
+ // Loss between 2% - 10%: Do nothing.
+
+ } else {
+ // Loss > 10%: Limit the rate decreases to once a kBweDecreaseIntervalMs +
+ // rtt.
+ if ((now_ms - time_last_decrease_ms_) >=
+ static_cast<uint32_t>(kBweDecreaseIntervalMs +
+ last_round_trip_time_ms_)) {
+ time_last_decrease_ms_ = now_ms;
+
+ // Reduce rate:
+ // newRate = rate * (1 - 0.5*lossRate);
+ // where packetLoss = 256*lossRate;
+ bitrate_ = static_cast<uint32_t>(
+ (bitrate_ * static_cast<double>(512 - last_fraction_loss_)) /
+ 512.0);
+
+ // Calculate what rate TFRC would apply in this situation and to not
+ // reduce further than it.
+ bitrate_ = std::max(
+ bitrate_,
+ CalcTfrcBps(last_round_trip_time_ms_, last_fraction_loss_));
+ }
+ }
}
- *bandwidth = bitrate_;
- return true;
+ CapBitrateToThresholds();
}
-/*
- * Calculate the rate that TCP-Friendly Rate Control (TFRC) would apply.
- * The formula in RFC 3448, Section 3.1, is used.
- */
-uint32_t SendSideBandwidthEstimation::CalcTFRCbps(uint16_t rtt, uint8_t loss) {
- if (rtt == 0 || loss == 0) {
- // input variables out of range
- return 0;
+void SendSideBandwidthEstimation::UpdateMinHistory(uint32_t now_ms) {
+ // Remove old data points from history.
+ // Since history precision is in ms, add one so it is able to increase
+ // bitrate if it is off by as little as 0.5ms.
+ while (!min_bitrate_history_.empty() &&
+ now_ms - min_bitrate_history_.front().first + 1 >
+ kBweIncreaseIntervalMs) {
+ min_bitrate_history_.pop_front();
}
- double R = static_cast<double>(rtt) / 1000; // RTT in seconds
- int b = 1; // number of packets acknowledged by a single TCP acknowledgement;
- // recommended = 1
- double t_RTO = 4.0 * R; // TCP retransmission timeout value in seconds
- // recommended = 4*R
- double p = static_cast<double>(loss) / 255; // packet loss rate in [0, 1)
- double s = static_cast<double>(kAvgPacketSizeBytes);
- // calculate send rate in bytes/second
- double X = s / (R * sqrt(2 * b * p / 3) +
- (t_RTO * (3 * sqrt(3 * b * p / 8) * p * (1 + 32 * p * p))));
+ // Typical minimum sliding-window algorithm: Pop values higher than current
+ // bitrate before pushing it.
+ while (!min_bitrate_history_.empty() &&
+ bitrate_ <= min_bitrate_history_.back().second) {
+ min_bitrate_history_.pop_back();
+ }
- return (static_cast<uint32_t>(X * 8)); // bits/second
+ min_bitrate_history_.push_back(std::make_pair(now_ms, bitrate_));
}
-bool SendSideBandwidthEstimation::ShapeSimple(const uint8_t loss,
- const uint32_t rtt,
- const uint32_t now_ms,
- uint32_t* bitrate) {
- uint32_t new_bitrate = 0;
- bool reducing = false;
-
- // Limit the rate increases to once a kBWEIncreaseIntervalMs.
- if (loss <= 5) {
- if ((now_ms - time_last_increase_) < kBWEIncreaseIntervalMs) {
- return false;
- }
- time_last_increase_ = now_ms;
- }
- // Limit the rate decreases to once a kBWEDecreaseIntervalMs + rtt.
- if (loss > 26) {
- if ((now_ms - time_last_decrease_) < kBWEDecreaseIntervalMs + rtt) {
- return false;
- }
- time_last_decrease_ = now_ms;
- }
-
- if (loss > 5 && loss <= 26) {
- // 2% - 10%
- new_bitrate = bitrate_;
- } else if (loss > 26) {
- // 26/256 ~= 10%
- // reduce rate: newRate = rate * (1 - 0.5*lossRate)
- // packetLoss = 256*lossRate
- new_bitrate = static_cast<uint32_t>((bitrate_ *
- static_cast<double>(512 - loss)) / 512.0);
- reducing = true;
- } else {
- // increase rate by 8%
- new_bitrate = static_cast<uint32_t>(bitrate_ * 1.08 + 0.5);
-
- // add 1 kbps extra, just to make sure that we do not get stuck
- // (gives a little extra increase at low rates, negligible at higher rates)
- new_bitrate += 1000;
- }
- if (reducing) {
- // Calculate what rate TFRC would apply in this situation
- // scale loss to Q0 (back to [0, 255])
- uint32_t tfrc_bitrate = CalcTFRCbps(rtt, loss);
- if (tfrc_bitrate > new_bitrate) {
- // do not reduce further if rate is below TFRC rate
- new_bitrate = tfrc_bitrate;
- }
- }
- if (bwe_incoming_ > 0 && new_bitrate > bwe_incoming_) {
- new_bitrate = bwe_incoming_;
+void SendSideBandwidthEstimation::CapBitrateToThresholds() {
+ if (bwe_incoming_ > 0 && bitrate_ > bwe_incoming_) {
+ bitrate_ = bwe_incoming_;
}
- if (new_bitrate > max_bitrate_configured_) {
- new_bitrate = max_bitrate_configured_;
+ if (bitrate_ > max_bitrate_configured_) {
+ bitrate_ = max_bitrate_configured_;
}
- if (new_bitrate < min_bitrate_configured_) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "The configured min bitrate (%u kbps) is greater than the "
- "estimated available bandwidth (%u kbps).\n",
- min_bitrate_configured_ / 1000, new_bitrate / 1000);
- new_bitrate = min_bitrate_configured_;
+ if (bitrate_ < min_bitrate_configured_) {
+ LOG(LS_WARNING) << "Estimated available bandwidth " << bitrate_ / 1000
+ << " kbps is below configured min bitrate "
+ << min_bitrate_configured_ / 1000 << " kbps.";
+ bitrate_ = min_bitrate_configured_;
}
- *bitrate = new_bitrate;
- return true;
}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
index 0c1fa94517d..eb675d1ca68 100644
--- a/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
+++ b/chromium/third_party/webrtc/modules/bitrate_controller/send_side_bandwidth_estimation.h
@@ -13,6 +13,8 @@
#ifndef WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_SIDE_BANDWIDTH_ESTIMATION_H_
#define WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_SIDE_BANDWIDTH_ESTIMATION_H_
+#include <deque>
+
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -22,38 +24,33 @@ class SendSideBandwidthEstimation {
SendSideBandwidthEstimation();
virtual ~SendSideBandwidthEstimation();
- // Call when we receive a RTCP message with TMMBR or REMB
- // Return true if new_bitrate is valid.
- bool UpdateBandwidthEstimate(const uint32_t bandwidth,
- uint32_t* new_bitrate,
- uint8_t* fraction_lost,
- uint16_t* rtt);
-
- // Call when we receive a RTCP message with a ReceiveBlock
- // Return true if new_bitrate is valid.
- bool UpdatePacketLoss(const int number_of_packets,
- const uint32_t rtt,
- const uint32_t now_ms,
- uint8_t* loss,
- uint32_t* new_bitrate);
-
- // Return false if no bandwidth estimate is available
- bool AvailableBandwidth(uint32_t* bandwidth) const;
- void SetSendBitrate(const uint32_t bitrate);
- void SetMinMaxBitrate(const uint32_t min_bitrate, const uint32_t max_bitrate);
+ void CurrentEstimate(uint32_t* bitrate, uint8_t* loss, uint32_t* rtt) const;
- private:
- bool ShapeSimple(const uint8_t loss, const uint32_t rtt,
- const uint32_t now_ms, uint32_t* bitrate);
+ // Call periodically to update estimate.
+ void UpdateEstimate(uint32_t now_ms);
+
+ // Call when we receive a RTCP message with TMMBR or REMB.
+ void UpdateReceiverEstimate(uint32_t bandwidth);
- uint32_t CalcTFRCbps(uint16_t rtt, uint8_t loss);
+ // Call when we receive a RTCP message with a ReceiveBlock.
+ void UpdateReceiverBlock(uint8_t fraction_loss,
+ uint32_t rtt,
+ int number_of_packets,
+ uint32_t now_ms);
+
+ void SetSendBitrate(uint32_t bitrate);
+ void SetMinMaxBitrate(uint32_t min_bitrate, uint32_t max_bitrate);
+ void SetMinBitrate(uint32_t min_bitrate);
+
+ private:
+ void CapBitrateToThresholds();
- enum { kBWEIncreaseIntervalMs = 1000 };
- enum { kBWEDecreaseIntervalMs = 300 };
- enum { kLimitNumPackets = 20 };
- enum { kAvgPacketSizeBytes = 1000 };
+ // Updates history of min bitrates.
+ // After this method returns min_bitrate_history_.front().second contains the
+ // min bitrate used during last kBweIncreaseIntervalMs.
+ void UpdateMinHistory(uint32_t now_ms);
- CriticalSectionWrapper* critsect_;
+ std::deque<std::pair<uint32_t, uint32_t> > min_bitrate_history_;
// incoming filters
int accumulate_lost_packets_Q8_;
@@ -63,12 +60,12 @@ class SendSideBandwidthEstimation {
uint32_t min_bitrate_configured_;
uint32_t max_bitrate_configured_;
+ uint32_t time_last_receiver_block_ms_;
uint8_t last_fraction_loss_;
- uint16_t last_round_trip_time_;
+ uint16_t last_round_trip_time_ms_;
uint32_t bwe_incoming_;
- uint32_t time_last_increase_;
- uint32_t time_last_decrease_;
+ uint32_t time_last_decrease_ms_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_SIDE_BANDWIDTH_ESTIMATION_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/OWNERS b/chromium/third_party/webrtc/modules/desktop_capture/OWNERS
index 3276530e93d..e85861b8d3e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/OWNERS
+++ b/chromium/third_party/webrtc/modules/desktop_capture/OWNERS
@@ -1,3 +1,8 @@
alexeypa@chromium.org
sergeyu@chromium.org
wez@chromium.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
index 6ed3ae881f2..2547ba37aa8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.cc
@@ -81,7 +81,7 @@ DesktopFrameWithCursor::DesktopFrameWithCursor(DesktopFrame* frame,
mutable_updated_region()->Swap(frame->mutable_updated_region());
DesktopVector image_pos = position.subtract(cursor.hotspot());
- DesktopRect target_rect = DesktopRect::MakeSize(cursor.image().size());
+ DesktopRect target_rect = DesktopRect::MakeSize(cursor.image()->size());
target_rect.Translate(image_pos);
DesktopVector target_origin = target_rect.top_left();
target_rect.IntersectWith(DesktopRect::MakeSize(size()));
@@ -101,10 +101,10 @@ DesktopFrameWithCursor::DesktopFrameWithCursor(DesktopFrame* frame,
target_rect.left() * DesktopFrame::kBytesPerPixel;
DesktopVector origin_shift = target_rect.top_left().subtract(target_origin);
AlphaBlend(target_rect_data, stride(),
- cursor.image().data() +
- origin_shift.y() * cursor.image().stride() +
+ cursor.image()->data() +
+ origin_shift.y() * cursor.image()->stride() +
origin_shift.x() * DesktopFrame::kBytesPerPixel,
- cursor.image().stride(),
+ cursor.image()->stride(),
target_rect.size());
}
@@ -142,12 +142,16 @@ void DesktopAndCursorComposer::Capture(const DesktopRegion& region) {
desktop_capturer_->Capture(region);
}
+void DesktopAndCursorComposer::SetExcludedWindow(WindowId window) {
+ desktop_capturer_->SetExcludedWindow(window);
+}
+
SharedMemory* DesktopAndCursorComposer::CreateSharedMemory(size_t size) {
return callback_->CreateSharedMemory(size);
}
void DesktopAndCursorComposer::OnCaptureCompleted(DesktopFrame* frame) {
- if (cursor_.get() && cursor_state_ == MouseCursorMonitor::INSIDE) {
+ if (frame && cursor_.get() && cursor_state_ == MouseCursorMonitor::INSIDE) {
DesktopFrameWithCursor* frame_with_cursor =
new DesktopFrameWithCursor(frame, *cursor_, cursor_position_);
frame = frame_with_cursor;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
index 4f7c85bde3e..3fac0212d49 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h
@@ -34,6 +34,7 @@ class DesktopAndCursorComposer : public DesktopCapturer,
// DesktopCapturer interface.
virtual void Start(DesktopCapturer::Callback* callback) OVERRIDE;
virtual void Capture(const DesktopRegion& region) OVERRIDE;
+ virtual void SetExcludedWindow(WindowId window) OVERRIDE;
private:
// DesktopCapturer::Callback interface.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc
index 15d6f546118..b482a29605e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc
@@ -58,6 +58,18 @@ uint32_t BlendPixels(uint32_t dest, uint32_t src) {
return b + (g << 8) + (r << 16) + 0xff000000;
}
+DesktopFrame* CreateTestFrame() {
+ DesktopFrame* frame =
+ new BasicDesktopFrame(DesktopSize(kScreenWidth, kScreenHeight));
+ uint32_t* data = reinterpret_cast<uint32_t*>(frame->data());
+ for (int y = 0; y < kScreenHeight; ++y) {
+ for (int x = 0; x < kScreenWidth; ++x) {
+ *(data++) = GetFakeFramePixelValue(DesktopVector(x, y));
+ }
+ }
+ return frame;
+}
+
class FakeScreenCapturer : public DesktopCapturer {
public:
FakeScreenCapturer() {}
@@ -67,27 +79,17 @@ class FakeScreenCapturer : public DesktopCapturer {
}
virtual void Capture(const DesktopRegion& region) OVERRIDE {
- DesktopFrame* frame =
- new BasicDesktopFrame(DesktopSize(kScreenWidth, kScreenHeight));
- uint32_t* data = reinterpret_cast<uint32_t*>(frame->data());
- for (int y = 0; y < kScreenHeight; ++y) {
- for (int x = 0; x < kScreenWidth; ++x) {
- *(data++) = GetFakeFramePixelValue(DesktopVector(x, y));
- }
- }
-
- last_frame_.reset(SharedDesktopFrame::Wrap(frame));
-
- callback_->OnCaptureCompleted(last_frame_->Share());
+ callback_->OnCaptureCompleted(next_frame_.release());
}
- // Returns last fake captured frame.
- SharedDesktopFrame* last_frame() { return last_frame_.get(); }
+ void SetNextFrame(DesktopFrame* next_frame) {
+ next_frame_.reset(next_frame);
+ }
private:
Callback* callback_;
- scoped_ptr<SharedDesktopFrame> last_frame_;
+ scoped_ptr<DesktopFrame> next_frame_;
};
class FakeMouseMonitor : public MouseCursorMonitor {
@@ -187,6 +189,20 @@ class DesktopAndCursorComposerTest : public testing::Test,
scoped_ptr<DesktopFrame> frame_;
};
+// Verify DesktopAndCursorComposer can handle the case when the screen capturer
+// fails.
+TEST_F(DesktopAndCursorComposerTest, Error) {
+ blender_.Start(this);
+
+ fake_cursor_->SetHotspot(DesktopVector());
+ fake_cursor_->SetState(MouseCursorMonitor::INSIDE, DesktopVector());
+ fake_screen_->SetNextFrame(NULL);
+
+ blender_.Capture(DesktopRegion());
+
+ EXPECT_EQ(frame_, static_cast<DesktopFrame*>(NULL));
+}
+
TEST_F(DesktopAndCursorComposerTest, Blend) {
struct {
int x, y;
@@ -222,6 +238,10 @@ TEST_F(DesktopAndCursorComposerTest, Blend) {
DesktopVector pos(tests[i].x, tests[i].y);
fake_cursor_->SetState(state, pos);
+ scoped_ptr<SharedDesktopFrame> frame(
+ SharedDesktopFrame::Wrap(CreateTestFrame()));
+ fake_screen_->SetNextFrame(frame->Share());
+
blender_.Capture(DesktopRegion());
VerifyFrame(*frame_, state, pos);
@@ -229,9 +249,7 @@ TEST_F(DesktopAndCursorComposerTest, Blend) {
// Verify that the cursor is erased before the frame buffer is returned to
// the screen capturer.
frame_.reset();
- VerifyFrame(*fake_screen_->last_frame(),
- MouseCursorMonitor::OUTSIDE,
- DesktopVector());
+ VerifyFrame(*frame, MouseCursorMonitor::OUTSIDE, DesktopVector());
}
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
index eb3bc9a29d1..6f4a083015a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture.gypi
@@ -36,6 +36,10 @@
"differ_block.h",
"mac/desktop_configuration.h",
"mac/desktop_configuration.mm",
+ "mac/desktop_configuration_monitor.h",
+ "mac/desktop_configuration_monitor.cc",
+ "mac/osx_version.h",
+ "mac/osx_version.cc",
"mac/scoped_pixel_buffer_object.cc",
"mac/scoped_pixel_buffer_object.h",
"mouse_cursor.cc",
@@ -65,9 +69,17 @@
"win/scoped_gdi_object.h",
"win/scoped_thread_desktop.cc",
"win/scoped_thread_desktop.h",
+ "win/screen_capturer_win_gdi.cc",
+ "win/screen_capturer_win_gdi.h",
+ "win/screen_capturer_win_magnifier.cc",
+ "win/screen_capturer_win_magnifier.h",
+ "win/screen_capture_utils.cc",
+ "win/screen_capture_utils.h",
+ "win/window_capture_utils.cc",
+ "win/window_capture_utils.h",
"window_capturer.cc",
"window_capturer.h",
- "window_capturer_mac.cc",
+ "window_capturer_mac.mm",
"window_capturer_win.cc",
"window_capturer_x11.cc",
"x11/shared_x_display.h",
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.cc b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.cc
index a4fa02547bd..105853bf94b 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.cc
@@ -19,6 +19,10 @@ DesktopCaptureOptions::DesktopCaptureOptions()
// XDamage is often broken, so don't use it by default.
use_update_notifications_ = false;
#endif
+
+#if defined(WEBRTC_WIN)
+ allow_use_magnification_api_ = false;
+#endif
}
DesktopCaptureOptions::~DesktopCaptureOptions() {}
@@ -29,6 +33,9 @@ DesktopCaptureOptions DesktopCaptureOptions::CreateDefault() {
#if defined(USE_X11)
result.set_x_display(SharedXDisplay::CreateDefault());
#endif
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ result.set_configuration_monitor(new DesktopConfigurationMonitor());
+#endif
return result;
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.h
index f0c76b1733f..c6aabd4529d 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_options.h
@@ -10,12 +10,17 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/system_wrappers/interface/scoped_refptr.h"
#if defined(USE_X11)
#include "webrtc/modules/desktop_capture/x11/shared_x_display.h"
#endif
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
+#endif
+
namespace webrtc {
// An object that stores initialization parameters for screen and window
@@ -38,6 +43,15 @@ class DesktopCaptureOptions {
}
#endif
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ DesktopConfigurationMonitor* configuration_monitor() const {
+ return configuration_monitor_;
+ }
+ void set_configuration_monitor(scoped_refptr<DesktopConfigurationMonitor> m) {
+ configuration_monitor_ = m;
+ }
+#endif
+
// Flag indicating that the capturer should use screen change notifications.
// Enables/disables use of XDAMAGE in the X11 capturer.
bool use_update_notifications() const { return use_update_notifications_; }
@@ -52,10 +66,27 @@ class DesktopCaptureOptions {
disable_effects_ = disable_effects;
}
+#if defined(WEBRTC_WIN)
+ bool allow_use_magnification_api() const {
+ return allow_use_magnification_api_;
+ }
+ void set_allow_use_magnification_api(bool allow) {
+ allow_use_magnification_api_ = allow;
+ }
+#endif
+
private:
#if defined(USE_X11)
scoped_refptr<SharedXDisplay> x_display_;
#endif
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+ scoped_refptr<DesktopConfigurationMonitor> configuration_monitor_;
+#endif
+
+#if defined(WEBRTC_WIN)
+ bool allow_use_magnification_api_;
+#endif
bool use_update_notifications_;
bool disable_effects_;
};
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_types.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_types.h
index d43ec499191..3e417965531 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_types.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capture_types.h
@@ -26,6 +26,17 @@ typedef intptr_t WindowId;
const WindowId kNullWindowId = 0;
+// Type used to identify screens on the desktop. Values are platform-specific:
+// - On Windows: integer display device index.
+// - On OSX: CGDirectDisplayID cast to intptr_t.
+// - On Linux (with X11): TBD.
+typedef intptr_t ScreenId;
+
+// The screen id corresponds to all screens combined together.
+const ScreenId kFullDesktopScreenId = -1;
+
+const ScreenId kInvalidScreenId = -2;
+
} // namespace webrtc
#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
index bcb664ef859..7ad16364977 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_capturer.h
@@ -13,6 +13,8 @@
#include <stddef.h>
+#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
+
namespace webrtc {
class DesktopFrame;
@@ -52,6 +54,11 @@ class DesktopCapturer {
// the top left corner of the capture target. Pending capture operations are
// canceled when DesktopCapturer is deleted.
virtual void Capture(const DesktopRegion& region) = 0;
+
+ // Sets the window to be excluded from the captured image in future
+ // Capture calls. Used to exclude the screenshare notification window for
+ // screen capturing.
+ virtual void SetExcludedWindow(WindowId window) {}
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_geometry.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_geometry.h
index e51273d8d21..047eeec3d9c 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_geometry.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_geometry.h
@@ -11,8 +11,8 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_GEOMETRY_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_DESKTOP_GEOMETRY_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/typedefs.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
namespace webrtc {
@@ -58,7 +58,7 @@ class DesktopSize {
int32_t width() const { return width_; }
int32_t height() const { return height_; }
- bool is_empty() const { return width_ <= 0 && height_ <= 0; }
+ bool is_empty() const { return width_ <= 0 || height_ <= 0; }
bool equals(const DesktopSize& other) const {
return width_ == other.width_ && height_ == other.height_;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
index fc7c6ed9e2d..c4528ae3496 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/desktop_region.h
@@ -14,8 +14,8 @@
#include <map>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/differ.h b/chromium/third_party/webrtc/modules/desktop_capture/differ.h
index 8edce80b4ef..0b419d2dded 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/differ.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/differ.h
@@ -76,7 +76,7 @@ class Differ {
int bytes_per_row_;
// Diff information for each block in the image.
- scoped_array<DiffInfo> diff_info_;
+ scoped_ptr<DiffInfo[]> diff_info_;
// Dimensions and total size of diff info array.
int diff_info_width_;
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
index 40fde4dbc4e..da1a21461dc 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/differ_unittest.cc
@@ -200,8 +200,8 @@ class DifferTest : public testing::Test {
int buffer_size_;
// Previous and current screen buffers.
- scoped_array<uint8_t> prev_;
- scoped_array<uint8_t> curr_;
+ scoped_ptr<uint8_t[]> prev_;
+ scoped_ptr<uint8_t[]> curr_;
private:
DISALLOW_COPY_AND_ASSIGN(DifferTest);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.h
index 433040a04ee..bb2339bb0f1 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.h
@@ -52,10 +52,18 @@ struct MacDesktopConfiguration {
// increase as you move up the screen) or Carbon-style "top-down" coordinates.
static MacDesktopConfiguration GetCurrent(Origin origin);
- // Bounds of the desktop in Density-Independent Pixels (DIPs).
+ // Returns true if the given desktop configuration equals this one.
+ bool Equals(const MacDesktopConfiguration& other);
+
+ // Returns the pointer to the display configuration with the specified id.
+ const MacDisplayConfiguration* FindDisplayConfigurationById(
+ CGDirectDisplayID id);
+
+ // Bounds of the desktop excluding monitors with DPI settings different from
+ // the main monitor. In Density-Independent Pixels (DIPs).
DesktopRect bounds;
- // Bounds of the desktop in physical pixels.
+ // Same as bounds, but expressed in physical pixels.
DesktopRect pixel_bounds;
// Scale factor from DIPs to physical pixels.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.mm b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.mm
index a917b5dc052..35fa65be2d2 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.mm
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration.mm
@@ -110,15 +110,8 @@ MacDesktopConfiguration MacDesktopConfiguration::GetCurrent(Origin origin) {
MacDisplayConfiguration display_config =
GetConfigurationForScreen([screens objectAtIndex: i]);
- // Handling mixed-DPI is hard, so we only return displays that match the
- // "primary" display's DPI. The primary display is always the first in the
- // list returned by [NSScreen screens].
- if (i == 0) {
+ if (i == 0)
desktop_config.dip_to_pixel_scale = display_config.dip_to_pixel_scale;
- } else if (desktop_config.dip_to_pixel_scale !=
- display_config.dip_to_pixel_scale) {
- continue;
- }
// Cocoa uses bottom-up coordinates, so if the caller wants top-down then
// we need to invert the positions of secondary monitors relative to the
@@ -126,21 +119,62 @@ MacDesktopConfiguration MacDesktopConfiguration::GetCurrent(Origin origin) {
if (i > 0 && origin == TopLeftOrigin) {
InvertRectYOrigin(desktop_config.displays[0].bounds,
&display_config.bounds);
- InvertRectYOrigin(desktop_config.displays[0].pixel_bounds,
- &display_config.pixel_bounds);
+ // |display_bounds| is density dependent, so we need to convert the
+      // primary monitor's position into the secondary monitor's density context.
+ float scaling_factor = display_config.dip_to_pixel_scale /
+ desktop_config.displays[0].dip_to_pixel_scale;
+ DesktopRect primary_bounds = DesktopRect::MakeLTRB(
+ desktop_config.displays[0].pixel_bounds.left() * scaling_factor,
+ desktop_config.displays[0].pixel_bounds.top() * scaling_factor,
+ desktop_config.displays[0].pixel_bounds.right() * scaling_factor,
+ desktop_config.displays[0].pixel_bounds.bottom() * scaling_factor);
+ InvertRectYOrigin(primary_bounds, &display_config.pixel_bounds);
}
// Add the display to the configuration.
desktop_config.displays.push_back(display_config);
- // Update the desktop bounds to account for this display.
- desktop_config.bounds =
- JoinRects(desktop_config.bounds, display_config.bounds);
- desktop_config.pixel_bounds =
- JoinRects(desktop_config.pixel_bounds, display_config.pixel_bounds);
+ // Update the desktop bounds to account for this display, unless the current
+ // display uses different DPI settings.
+ if (display_config.dip_to_pixel_scale ==
+ desktop_config.dip_to_pixel_scale) {
+ desktop_config.bounds =
+ JoinRects(desktop_config.bounds, display_config.bounds);
+ desktop_config.pixel_bounds =
+ JoinRects(desktop_config.pixel_bounds, display_config.pixel_bounds);
+ }
}
return desktop_config;
}
+// For convenience of comparing MacDisplayConfigurations in
+// MacDesktopConfiguration::Equals.
+bool operator==(const MacDisplayConfiguration& left,
+ const MacDisplayConfiguration& right) {
+ return left.id == right.id &&
+ left.bounds.equals(right.bounds) &&
+ left.pixel_bounds.equals(right.pixel_bounds) &&
+ left.dip_to_pixel_scale == right.dip_to_pixel_scale;
+}
+
+bool MacDesktopConfiguration::Equals(const MacDesktopConfiguration& other) {
+ return bounds.equals(other.bounds) &&
+ pixel_bounds.equals(other.pixel_bounds) &&
+ dip_to_pixel_scale == other.dip_to_pixel_scale &&
+ displays == other.displays;
+}
+
+// Finds the display configuration with the specified id.
+const MacDisplayConfiguration*
+MacDesktopConfiguration::FindDisplayConfigurationById(
+ CGDirectDisplayID id) {
+ for (MacDisplayConfigurations::const_iterator it = displays.begin();
+ it != displays.end(); ++it) {
+ if (it->id == id)
+ return &(*it);
+ }
+ return NULL;
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.cc b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.cc
new file mode 100644
index 00000000000..f0d5c34be65
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.cc
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
+
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+// The amount of time allowed for displays to reconfigure.
+static const int64_t kDisplayConfigurationEventTimeoutMs = 10 * 1000;
+
+DesktopConfigurationMonitor::DesktopConfigurationMonitor()
+ : ref_count_(0),
+ display_configuration_capture_event_(EventWrapper::Create()) {
+ CGError err = CGDisplayRegisterReconfigurationCallback(
+ DesktopConfigurationMonitor::DisplaysReconfiguredCallback, this);
+ if (err != kCGErrorSuccess) {
+ LOG(LS_ERROR) << "CGDisplayRegisterReconfigurationCallback " << err;
+ abort();
+ }
+ display_configuration_capture_event_->Set();
+
+ desktop_configuration_ = MacDesktopConfiguration::GetCurrent(
+ MacDesktopConfiguration::TopLeftOrigin);
+}
+
+DesktopConfigurationMonitor::~DesktopConfigurationMonitor() {
+ CGError err = CGDisplayRemoveReconfigurationCallback(
+ DesktopConfigurationMonitor::DisplaysReconfiguredCallback, this);
+ if (err != kCGErrorSuccess)
+ LOG(LS_ERROR) << "CGDisplayRemoveReconfigurationCallback " << err;
+}
+
+void DesktopConfigurationMonitor::Lock() {
+ if (!display_configuration_capture_event_->Wait(
+ kDisplayConfigurationEventTimeoutMs)) {
+ LOG_F(LS_ERROR) << "Event wait timed out.";
+ abort();
+ }
+}
+
+void DesktopConfigurationMonitor::Unlock() {
+ display_configuration_capture_event_->Set();
+}
+
+// static
+void DesktopConfigurationMonitor::DisplaysReconfiguredCallback(
+ CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags,
+ void *user_parameter) {
+ DesktopConfigurationMonitor* monitor =
+ reinterpret_cast<DesktopConfigurationMonitor*>(user_parameter);
+ monitor->DisplaysReconfigured(display, flags);
+}
+
+void DesktopConfigurationMonitor::DisplaysReconfigured(
+ CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags) {
+ if (flags & kCGDisplayBeginConfigurationFlag) {
+ if (reconfiguring_displays_.empty()) {
+ // If this is the first display to start reconfiguring then wait on
+ // |display_configuration_capture_event_| to block the capture thread
+ // from accessing display memory until the reconfiguration completes.
+ if (!display_configuration_capture_event_->Wait(
+ kDisplayConfigurationEventTimeoutMs)) {
+ LOG_F(LS_ERROR) << "Event wait timed out.";
+ abort();
+ }
+ }
+ reconfiguring_displays_.insert(display);
+ } else {
+ reconfiguring_displays_.erase(display);
+ if (reconfiguring_displays_.empty()) {
+ desktop_configuration_ = MacDesktopConfiguration::GetCurrent(
+ MacDesktopConfiguration::TopLeftOrigin);
+ display_configuration_capture_event_->Set();
+ }
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
new file mode 100644
index 00000000000..27143a84e1e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_DESKTOP_CONFIGURATION_MONITOR_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_DESKTOP_CONFIGURATION_MONITOR_H_
+
+#include <ApplicationServices/ApplicationServices.h>
+
+#include <set>
+
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/system_wrappers/interface/atomic32.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class EventWrapper;
+
+// The class provides functions to synchronize capturing and display
+// reconfiguring across threads, and the up-to-date MacDesktopConfiguration.
+class DesktopConfigurationMonitor {
+ public:
+ DesktopConfigurationMonitor();
+ // Acquires a lock on the current configuration.
+ void Lock();
+ // Releases the lock previously acquired.
+ void Unlock();
+ // Returns the current desktop configuration. Should only be called when the
+ // lock has been acquired.
+ const MacDesktopConfiguration& desktop_configuration() {
+ return desktop_configuration_;
+ }
+
+ void AddRef() { ++ref_count_; }
+ void Release() {
+ if (--ref_count_ == 0)
+ delete this;
+ }
+
+ private:
+ static void DisplaysReconfiguredCallback(CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags,
+ void *user_parameter);
+ ~DesktopConfigurationMonitor();
+
+ void DisplaysReconfigured(CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags);
+
+ Atomic32 ref_count_;
+ std::set<CGDirectDisplayID> reconfiguring_displays_;
+ MacDesktopConfiguration desktop_configuration_;
+ scoped_ptr<EventWrapper> display_configuration_capture_event_;
+
+ DISALLOW_COPY_AND_ASSIGN(DesktopConfigurationMonitor);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_MAC_DESKTOP_CONFIGURATION_MONITOR_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.cc b/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.cc
new file mode 100644
index 00000000000..7466f20342b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <sys/utsname.h>
+
+#include "webrtc/system_wrappers/interface/logging.h"
+
+namespace webrtc {
+
+namespace {
+
+int GetDarwinVersion() {
+ struct utsname uname_info;
+ if (uname(&uname_info) != 0) {
+ LOG(LS_ERROR) << "uname failed";
+ return 0;
+ }
+
+ if (strcmp(uname_info.sysname, "Darwin") != 0)
+ return 0;
+
+ char* dot;
+ int result = strtol(uname_info.release, &dot, 10);
+ if (*dot != '.') {
+ LOG(LS_ERROR) << "Failed to parse version";
+ return 0;
+ }
+
+ return result;
+}
+
+} // namespace
+
+bool IsOSLionOrLater() {
+ static int darwin_version = GetDarwinVersion();
+
+ // Verify that the version has been parsed correctly.
+ if (darwin_version < 6) {
+ LOG_F(LS_ERROR) << "Invalid Darwin version: " << darwin_version;
+ abort();
+ }
+
+ // Darwin major version 11 corresponds to OSX 10.7.
+ return darwin_version >= 11;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.h
new file mode 100644
index 00000000000..0ba49a4e69e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/osx_version.h
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+namespace webrtc {
+
+// Returns true if the OS version >= OSX 10.7.
+bool IsOSLionOrLater();
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h b/chromium/third_party/webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h
index 73d425aea23..4d1dd1ffd6a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h
@@ -14,7 +14,7 @@
#include <OpenGL/CGLMacro.h>
#include <OpenGL/OpenGL.h>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.cc
index 3f1ab3ddf50..22a9c0ee8c8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.cc
@@ -10,10 +10,14 @@
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+#include <assert.h>
+
#include "webrtc/modules/desktop_capture/desktop_frame.h"
namespace webrtc {
+MouseCursor::MouseCursor() {}
+
MouseCursor::MouseCursor(DesktopFrame* image, const DesktopVector& hotspot)
: image_(image),
hotspot_(hotspot) {
@@ -25,8 +29,10 @@ MouseCursor::~MouseCursor() {}
// static
MouseCursor* MouseCursor::CopyOf(const MouseCursor& cursor) {
- return new MouseCursor(BasicDesktopFrame::CopyOf(cursor.image()),
- cursor.hotspot());
+ return cursor.image()
+ ? new MouseCursor(BasicDesktopFrame::CopyOf(*cursor.image()),
+ cursor.hotspot())
+ : new MouseCursor();
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.h b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.h
index 4cf770830ce..22887f9ae47 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor.h
@@ -11,8 +11,8 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_MOUSE_CURSOR_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
@@ -21,13 +21,19 @@ class DesktopFrame;
class MouseCursor {
public:
+ MouseCursor();
+
// Takes ownership of |image|. |hotspot| must be within |image| boundaries.
MouseCursor(DesktopFrame* image, const DesktopVector& hotspot);
+
~MouseCursor();
static MouseCursor* CopyOf(const MouseCursor& cursor);
- const DesktopFrame& image() const { return *image_; }
+ void set_image(DesktopFrame* image) { image_.reset(image); }
+ const DesktopFrame* image() const { return image_.get(); }
+
+ void set_hotspot(const DesktopVector& hotspot ) { hotspot_ = hotspot; }
const DesktopVector& hotspot() const { return hotspot_; }
private:
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h
index 9785b736b2a..24dfe72dfa7 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h
@@ -69,7 +69,8 @@ class MouseCursorMonitor {
//
// TODO(sergeyu): Provide a way to select a specific screen.
static MouseCursorMonitor* CreateForScreen(
- const DesktopCaptureOptions& options);
+ const DesktopCaptureOptions& options,
+ ScreenId screen);
// Initializes the monitor with the |callback|, which must remain valid until
// capturer is destroyed.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm
index d3c02896f9e..e8806338197 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_mac.mm
@@ -15,35 +15,59 @@
#include <Cocoa/Cocoa.h>
#include <CoreFoundation/CoreFoundation.h>
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
+#include "webrtc/modules/desktop_capture/mac/osx_version.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/scoped_refptr.h"
namespace webrtc {
class MouseCursorMonitorMac : public MouseCursorMonitor {
public:
- MouseCursorMonitorMac(CGWindowID window_id);
+ MouseCursorMonitorMac(const DesktopCaptureOptions& options,
+ CGWindowID window_id,
+ ScreenId screen_id);
virtual ~MouseCursorMonitorMac();
virtual void Init(Callback* callback, Mode mode) OVERRIDE;
virtual void Capture() OVERRIDE;
private:
+ static void DisplaysReconfiguredCallback(CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags,
+ void *user_parameter);
+ void DisplaysReconfigured(CGDirectDisplayID display,
+ CGDisplayChangeSummaryFlags flags);
+
void CaptureImage();
+ scoped_refptr<DesktopConfigurationMonitor> configuration_monitor_;
CGWindowID window_id_;
-
+ ScreenId screen_id_;
Callback* callback_;
Mode mode_;
-
scoped_ptr<MouseCursor> last_cursor_;
};
-MouseCursorMonitorMac::MouseCursorMonitorMac(CGWindowID window_id)
- : window_id_(window_id),
+MouseCursorMonitorMac::MouseCursorMonitorMac(
+ const DesktopCaptureOptions& options,
+ CGWindowID window_id,
+ ScreenId screen_id)
+ : configuration_monitor_(options.configuration_monitor()),
+ window_id_(window_id),
+ screen_id_(screen_id),
callback_(NULL),
mode_(SHAPE_AND_POSITION) {
+ assert(window_id == kCGNullWindowID || screen_id == kInvalidScreenId);
+ if (screen_id != kInvalidScreenId && !IsOSLionOrLater()) {
+ // Single screen capture is not supported on pre OS X 10.7.
+ screen_id_ = kFullDesktopScreenId;
+ }
}
MouseCursorMonitorMac::~MouseCursorMonitorMac() {}
@@ -72,6 +96,21 @@ void MouseCursorMonitorMac::Capture() {
DesktopVector position(gc_position.x, gc_position.y);
+ configuration_monitor_->Lock();
+ MacDesktopConfiguration configuration =
+ configuration_monitor_->desktop_configuration();
+ configuration_monitor_->Unlock();
+ float scale = 1.0f;
+
+  // Find the DIP-to-physical-pixel scale for the screen where the mouse
+  // cursor is.
+ for (MacDisplayConfigurations::iterator it = configuration.displays.begin();
+ it != configuration.displays.end(); ++it) {
+ if (it->bounds.Contains(position)) {
+ scale = it->dip_to_pixel_scale;
+ break;
+ }
+ }
// If we are capturing cursor for a specific window then we need to figure out
// if the current mouse position is covered by another window and also adjust
// |position| to make it relative to the window origin.
@@ -134,10 +173,8 @@ void MouseCursorMonitorMac::Capture() {
}
}
}
-
CFRelease(window_array);
}
-
if (!found_window) {
// If we failed to get list of windows or the window wasn't in the list
// pretend that the cursor is outside the window. This can happen, e.g. if
@@ -145,8 +182,32 @@ void MouseCursorMonitorMac::Capture() {
state = OUTSIDE;
position.set(-1, -1);
}
+ } else {
+ assert(screen_id_ >= kFullDesktopScreenId);
+ if (screen_id_ != kFullDesktopScreenId) {
+ // For single screen capturing, convert the position to relative to the
+ // target screen.
+ const MacDisplayConfiguration* config =
+ configuration.FindDisplayConfigurationById(
+ static_cast<CGDirectDisplayID>(screen_id_));
+ if (config) {
+ if (!config->pixel_bounds.Contains(position))
+ state = OUTSIDE;
+ position = position.subtract(config->bounds.top_left());
+ } else {
+ // The target screen is no longer valid.
+ state = OUTSIDE;
+ position.set(-1, -1);
+ }
+ } else {
+ position.subtract(configuration.bounds.top_left());
+ }
+ }
+ if (state == INSIDE) {
+    // Convert Density-Independent Pixels (DIPs) to physical pixels.
+ position = DesktopVector(round(position.x() * scale),
+ round(position.y() * scale));
}
-
callback_->OnMouseCursorPosition(state, position);
}
@@ -182,10 +243,10 @@ void MouseCursorMonitorMac::CaptureImage() {
// Compare the cursor with the previous one.
if (last_cursor_.get() &&
- last_cursor_->image().size().equals(size) &&
+ last_cursor_->image()->size().equals(size) &&
last_cursor_->hotspot().equals(hotspot) &&
- memcmp(last_cursor_->image().data(), src_data,
- last_cursor_->image().stride() * size.height()) == 0) {
+ memcmp(last_cursor_->image()->data(), src_data,
+ last_cursor_->image()->stride() * size.height()) == 0) {
return;
}
@@ -204,15 +265,15 @@ void MouseCursorMonitorMac::CaptureImage() {
callback_->OnMouseCursor(cursor.release());
}
-
MouseCursorMonitor* MouseCursorMonitor::CreateForWindow(
const DesktopCaptureOptions& options, WindowId window) {
- return new MouseCursorMonitorMac(window);
+ return new MouseCursorMonitorMac(options, window, kInvalidScreenId);
}
MouseCursorMonitor* MouseCursorMonitor::CreateForScreen(
- const DesktopCaptureOptions& options) {
- return new MouseCursorMonitorMac(kCGNullWindowID);
+ const DesktopCaptureOptions& options,
+ ScreenId screen) {
+ return new MouseCursorMonitorMac(options, kCGNullWindowID, screen);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc
index 7aa1b7141d5..3a632cc0d9c 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_null.cc
@@ -10,7 +10,7 @@
#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h"
-#include <cstddef>
+#include <stddef.h>
namespace webrtc {
@@ -21,7 +21,8 @@ MouseCursorMonitor* MouseCursorMonitor::CreateForWindow(
}
MouseCursorMonitor* MouseCursorMonitor::CreateForScreen(
- const DesktopCaptureOptions& options) {
+ const DesktopCaptureOptions& options,
+ ScreenId screen) {
return NULL;
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc
index 18bf1ca40e0..c6af2b700e4 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_unittest.cc
@@ -50,7 +50,12 @@ class MouseCursorMonitorTest : public testing::Test,
// tests. Figure out how to do that without breaking other tests in
// modules_unittests and enable these tests on Mac.
// https://code.google.com/p/webrtc/issues/detail?id=2532
-#if !defined(WEBRTC_MAC)
+//
+// Disabled on Windows due to flake, see:
+// https://code.google.com/p/webrtc/issues/detail?id=3408
+// Disabled on Linux due to flake, see:
+// https://code.google.com/p/webrtc/issues/detail?id=3245
+#if !defined(WEBRTC_MAC) && !defined(WEBRTC_WIN) && !defined(WEBRTC_LINUX)
#define MAYBE(x) x
#else
#define MAYBE(x) DISABLED_##x
@@ -58,7 +63,7 @@ class MouseCursorMonitorTest : public testing::Test,
TEST_F(MouseCursorMonitorTest, MAYBE(FromScreen)) {
scoped_ptr<MouseCursorMonitor> capturer(MouseCursorMonitor::CreateForScreen(
- DesktopCaptureOptions::CreateDefault()));
+ DesktopCaptureOptions::CreateDefault(), webrtc::kFullDesktopScreenId));
assert(capturer.get());
capturer->Init(this, MouseCursorMonitor::SHAPE_AND_POSITION);
capturer->Capture();
@@ -66,10 +71,10 @@ TEST_F(MouseCursorMonitorTest, MAYBE(FromScreen)) {
EXPECT_TRUE(cursor_image_.get());
EXPECT_GE(cursor_image_->hotspot().x(), 0);
EXPECT_LE(cursor_image_->hotspot().x(),
- cursor_image_->image().size().width());
+ cursor_image_->image()->size().width());
EXPECT_GE(cursor_image_->hotspot().y(), 0);
EXPECT_LE(cursor_image_->hotspot().y(),
- cursor_image_->image().size().height());
+ cursor_image_->image()->size().height());
EXPECT_TRUE(position_received_);
EXPECT_EQ(MouseCursorMonitor::INSIDE, state_);
@@ -109,7 +114,7 @@ TEST_F(MouseCursorMonitorTest, MAYBE(FromWindow)) {
// Make sure that OnMouseCursorPosition() is not called in the SHAPE_ONLY mode.
TEST_F(MouseCursorMonitorTest, MAYBE(ShapeOnly)) {
scoped_ptr<MouseCursorMonitor> capturer(MouseCursorMonitor::CreateForScreen(
- DesktopCaptureOptions::CreateDefault()));
+ DesktopCaptureOptions::CreateDefault(), webrtc::kFullDesktopScreenId));
assert(capturer.get());
capturer->Init(this, MouseCursorMonitor::SHAPE_ONLY);
capturer->Capture();
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
index 82f7d2447fb..fd0b222a3a6 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_win.cc
@@ -10,9 +10,12 @@
#include "webrtc/modules/desktop_capture/mouse_cursor_monitor.h"
+#include <assert.h>
+
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
#include "webrtc/modules/desktop_capture/win/cursor.h"
+#include "webrtc/modules/desktop_capture/win/window_capture_utils.h"
#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -20,13 +23,20 @@ namespace webrtc {
class MouseCursorMonitorWin : public MouseCursorMonitor {
public:
explicit MouseCursorMonitorWin(HWND window);
+ explicit MouseCursorMonitorWin(ScreenId screen);
virtual ~MouseCursorMonitorWin();
virtual void Init(Callback* callback, Mode mode) OVERRIDE;
virtual void Capture() OVERRIDE;
private:
+ // Get the rect of the currently selected screen, relative to the primary
+ // display's top-left. If the screen is disabled or disconnected, or any error
+ // happens, an empty rect is returned.
+ DesktopRect GetScreenRect();
+
HWND window_;
+ ScreenId screen_;
Callback* callback_;
Mode mode_;
@@ -38,12 +48,23 @@ class MouseCursorMonitorWin : public MouseCursorMonitor {
MouseCursorMonitorWin::MouseCursorMonitorWin(HWND window)
: window_(window),
+ screen_(kInvalidScreenId),
callback_(NULL),
mode_(SHAPE_AND_POSITION),
desktop_dc_(NULL),
last_cursor_(NULL) {
}
+MouseCursorMonitorWin::MouseCursorMonitorWin(ScreenId screen)
+ : window_(NULL),
+ screen_(screen),
+ callback_(NULL),
+ mode_(SHAPE_AND_POSITION),
+ desktop_dc_(NULL),
+ last_cursor_(NULL) {
+ assert(screen >= kFullDesktopScreenId);
+}
+
MouseCursorMonitorWin::~MouseCursorMonitorWin() {
if (desktop_dc_)
ReleaseDC(NULL, desktop_dc_);
@@ -85,28 +106,68 @@ void MouseCursorMonitorWin::Capture() {
bool inside = cursor_info.flags == CURSOR_SHOWING;
if (window_) {
- RECT rect;
- if (!GetWindowRect(window_, &rect)) {
+ DesktopRect original_rect;
+ DesktopRect cropped_rect;
+ if (!GetCroppedWindowRect(window_, &cropped_rect, &original_rect)) {
position.set(0, 0);
inside = false;
} else {
- position = position.subtract(DesktopVector(rect.left, rect.top));
- if (inside)
- inside = (window_ == WindowFromPoint(cursor_info.ptScreenPos));
+ if (inside) {
+ HWND windowUnderCursor = WindowFromPoint(cursor_info.ptScreenPos);
+ inside = windowUnderCursor ?
+ (window_ == GetAncestor(windowUnderCursor, GA_ROOT)) : false;
+ }
+ position = position.subtract(cropped_rect.top_left());
}
+ } else {
+ assert(screen_ != kInvalidScreenId);
+ DesktopRect rect = GetScreenRect();
+ if (inside)
+ inside = rect.Contains(position);
+ position = position.subtract(rect.top_left());
}
callback_->OnMouseCursorPosition(inside ? INSIDE : OUTSIDE, position);
}
+DesktopRect MouseCursorMonitorWin::GetScreenRect() {
+ assert(screen_ != kInvalidScreenId);
+ if (screen_ == kFullDesktopScreenId) {
+ return DesktopRect::MakeXYWH(
+ GetSystemMetrics(SM_XVIRTUALSCREEN),
+ GetSystemMetrics(SM_YVIRTUALSCREEN),
+ GetSystemMetrics(SM_CXVIRTUALSCREEN),
+ GetSystemMetrics(SM_CYVIRTUALSCREEN));
+ }
+ DISPLAY_DEVICE device;
+ device.cb = sizeof(device);
+ BOOL result = EnumDisplayDevices(NULL, screen_, &device, 0);
+ if (!result)
+ return DesktopRect();
+
+ DEVMODE device_mode;
+ device_mode.dmSize = sizeof(device_mode);
+ device_mode.dmDriverExtra = 0;
+ result = EnumDisplaySettingsEx(
+ device.DeviceName, ENUM_CURRENT_SETTINGS, &device_mode, 0);
+ if (!result)
+ return DesktopRect();
+
+ return DesktopRect::MakeXYWH(device_mode.dmPosition.x,
+ device_mode.dmPosition.y,
+ device_mode.dmPelsWidth,
+ device_mode.dmPelsHeight);
+}
+
MouseCursorMonitor* MouseCursorMonitor::CreateForWindow(
const DesktopCaptureOptions& options, WindowId window) {
return new MouseCursorMonitorWin(reinterpret_cast<HWND>(window));
}
MouseCursorMonitor* MouseCursorMonitor::CreateForScreen(
- const DesktopCaptureOptions& options) {
- return new MouseCursorMonitorWin(NULL);
+ const DesktopCaptureOptions& options,
+ ScreenId screen) {
+ return new MouseCursorMonitorWin(screen);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc
index 9114b95f3b4..f09593db9d6 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor_x11.cc
@@ -17,6 +17,7 @@
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+#include "webrtc/modules/desktop_capture/x11/x_error_trap.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@@ -145,10 +146,12 @@ void MouseCursorMonitorX11::Capture() {
Window root_window;
Window child_window;
unsigned int mask;
+
+ XErrorTrap error_trap(display());
Bool result = XQueryPointer(display(), window_, &root_window, &child_window,
&root_x, &root_y, &win_x, &win_y, &mask);
CursorState state;
- if (!result) {
+ if (!result || error_trap.GetLastErrorAndDisable() != 0) {
state = OUTSIDE;
} else {
// In screen mode (window_ == root_window) the mouse is always inside.
@@ -214,7 +217,8 @@ MouseCursorMonitor* MouseCursorMonitor::CreateForWindow(
}
MouseCursorMonitor* MouseCursorMonitor::CreateForScreen(
- const DesktopCaptureOptions& options) {
+ const DesktopCaptureOptions& options,
+ ScreenId screen) {
if (!options.x_display())
return NULL;
return new MouseCursorMonitorX11(
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc
index b045f05267c..45a3507b923 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capture_frame_queue.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
+#include <assert.h>
#include <algorithm>
#include "webrtc/modules/desktop_capture/desktop_frame.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
index 9dd3a1050a4..a8d40a72ef8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer.h
@@ -11,6 +11,9 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_SCREEN_CAPTURER_H_
+#include <vector>
+
+#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
#include "webrtc/modules/desktop_capture/desktop_capturer.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
@@ -40,6 +43,13 @@ struct MouseCursorShape;
// Since data can be read while another capture action is happening.
class ScreenCapturer : public DesktopCapturer {
public:
+ // Use a struct to represent a screen although it has only an id for now,
+ // because we may want to add more fields (e.g. description) in the future.
+ struct Screen {
+ ScreenId id;
+ };
+ typedef std::vector<Screen> ScreenList;
+
// Provides callbacks used by the capturer to pass captured video frames and
// mouse cursor shapes to the processing pipeline.
//
@@ -78,6 +88,15 @@ class ScreenCapturer : public DesktopCapturer {
// remain valid until the capturer is destroyed.
virtual void SetMouseShapeObserver(
MouseShapeObserver* mouse_shape_observer) = 0;
+
+ // Get the list of screens (not containing kFullDesktopScreenId). Returns
+ // false in case of a failure.
+ virtual bool GetScreenList(ScreenList* screens) = 0;
+
+ // Select the screen to be captured. Returns false in case of a failure (e.g.
+ // if there is no screen with the specified id). If this is never called, the
+ // full desktop is captured.
+ virtual bool SelectScreen(ScreenId id) = 0;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.cc
index 75af043c84d..86761c170f0 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_helper.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include <assert.h>
#include <algorithm>
#include "webrtc/system_wrappers/interface/logging.h"
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
index 00639c73918..2d5733906f0 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mac.mm
@@ -19,18 +19,18 @@
#include <IOKit/pwr_mgt/IOPMLib.h>
#include <OpenGL/CGLMacro.h>
#include <OpenGL/OpenGL.h>
-#include <sys/utsname.h>
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/desktop_geometry.h"
#include "webrtc/modules/desktop_capture/desktop_region.h"
#include "webrtc/modules/desktop_capture/mac/desktop_configuration.h"
+#include "webrtc/modules/desktop_capture/mac/desktop_configuration_monitor.h"
+#include "webrtc/modules/desktop_capture/mac/osx_version.h"
#include "webrtc/modules/desktop_capture/mac/scoped_pixel_buffer_object.h"
#include "webrtc/modules/desktop_capture/mouse_cursor_shape.h"
#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
-#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
@@ -63,7 +63,8 @@ DesktopRect ScaleAndRoundCGRect(const CGRect& rect, float scale) {
static_cast<int>(ceil((rect.origin.y + rect.size.height) * scale)));
}
-// Copy pixels in the |rect| from |src_place| to |dest_plane|.
+// Copy pixels in the |rect| from |src_place| to |dest_plane|. |rect| should be
+// relative to the origin of |src_plane| and |dest_plane|.
void CopyRect(const uint8_t* src_plane,
int src_plane_stride,
uint8_t* dest_plane,
@@ -87,46 +88,110 @@ void CopyRect(const uint8_t* src_plane,
}
}
-int GetDarwinVersion() {
- struct utsname uname_info;
- if (uname(&uname_info) != 0) {
- LOG(LS_ERROR) << "uname failed";
- return 0;
+// Returns an array of CGWindowID for all the on-screen windows except
+// |window_to_exclude|, or NULL if the window is not found or it fails. The
+// caller should release the returned CFArrayRef.
+CFArrayRef CreateWindowListWithExclusion(CGWindowID window_to_exclude) {
+ if (!window_to_exclude)
+ return NULL;
+
+ CFArrayRef all_windows = CGWindowListCopyWindowInfo(
+ kCGWindowListOptionOnScreenOnly, kCGNullWindowID);
+ if (!all_windows)
+ return NULL;
+
+ CFMutableArrayRef returned_array = CFArrayCreateMutable(
+ NULL, CFArrayGetCount(all_windows), NULL);
+
+ bool found = false;
+ for (CFIndex i = 0; i < CFArrayGetCount(all_windows); ++i) {
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(all_windows, i));
+
+ CFNumberRef id_ref = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowNumber));
+
+ CGWindowID id;
+ CFNumberGetValue(id_ref, kCFNumberIntType, &id);
+ if (id == window_to_exclude) {
+ found = true;
+ continue;
+ }
+ CFArrayAppendValue(returned_array, reinterpret_cast<void *>(id));
}
+ CFRelease(all_windows);
- if (strcmp(uname_info.sysname, "Darwin") != 0)
- return 0;
-
- char* dot;
- int result = strtol(uname_info.release, &dot, 10);
- if (*dot != '.') {
- LOG(LS_ERROR) << "Failed to parse version";
- return 0;
+ if (!found) {
+ CFRelease(returned_array);
+ returned_array = NULL;
}
-
- return result;
+ return returned_array;
}
-bool IsOSLionOrLater() {
- static int darwin_version = GetDarwinVersion();
-
- // Verify that the version has been parsed correctly.
- if (darwin_version < 6) {
- LOG_F(LS_ERROR) << "Invalid Darwin version: " << darwin_version;
- abort();
+// Returns the bounds of |window| in physical pixels, enlarged by a small amount
+// on four edges to take account of the border/shadow effects.
+DesktopRect GetExcludedWindowPixelBounds(CGWindowID window,
+ float dip_to_pixel_scale) {
+ // The amount of pixels to add to the actual window bounds to take into
+ // account of the border/shadow effects.
+ static const int kBorderEffectSize = 20;
+ CGRect rect;
+ CGWindowID ids[1];
+ ids[0] = window;
+
+ CFArrayRef window_id_array =
+ CFArrayCreate(NULL, reinterpret_cast<const void **>(&ids), 1, NULL);
+ CFArrayRef window_array =
+ CGWindowListCreateDescriptionFromArray(window_id_array);
+
+ if (CFArrayGetCount(window_array) > 0) {
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, 0));
+ CFDictionaryRef bounds_ref = reinterpret_cast<CFDictionaryRef>(
+ CFDictionaryGetValue(window, kCGWindowBounds));
+ CGRectMakeWithDictionaryRepresentation(bounds_ref, &rect);
}
- // Darwin major version 11 corresponds to OSX 10.7.
- return darwin_version >= 11;
+ CFRelease(window_id_array);
+ CFRelease(window_array);
+
+ rect.origin.x -= kBorderEffectSize;
+ rect.origin.y -= kBorderEffectSize;
+ rect.size.width += kBorderEffectSize * 2;
+ rect.size.height += kBorderEffectSize * 2;
+ // |rect| is in DIP, so convert to physical pixels.
+ return ScaleAndRoundCGRect(rect, dip_to_pixel_scale);
}
-// The amount of time allowed for displays to reconfigure.
-const int64_t kDisplayConfigurationEventTimeoutMs = 10 * 1000;
+// Create an image of the given region using the given |window_list|.
+// |pixel_bounds| should be in the primary display's coordinate in physical
+// pixels. The caller should release the returned CGImageRef and CFDataRef.
+CGImageRef CreateExcludedWindowRegionImage(const DesktopRect& pixel_bounds,
+ float dip_to_pixel_scale,
+ CFArrayRef window_list,
+ CFDataRef* data_ref) {
+ CGRect window_bounds;
+ // The origin is in DIP while the size is in physical pixels. That's what
+ // CGWindowListCreateImageFromArray expects.
+ window_bounds.origin.x = pixel_bounds.left() / dip_to_pixel_scale;
+ window_bounds.origin.y = pixel_bounds.top() / dip_to_pixel_scale;
+ window_bounds.size.width = pixel_bounds.width();
+ window_bounds.size.height = pixel_bounds.height();
+
+ CGImageRef excluded_image = CGWindowListCreateImageFromArray(
+ window_bounds, window_list, kCGWindowImageDefault);
+
+ CGDataProviderRef provider = CGImageGetDataProvider(excluded_image);
+ *data_ref = CGDataProviderCopyData(provider);
+ assert(*data_ref);
+ return excluded_image;
+}
// A class to perform video frame capturing for mac.
class ScreenCapturerMac : public ScreenCapturer {
public:
- ScreenCapturerMac();
+ explicit ScreenCapturerMac(
+ scoped_refptr<DesktopConfigurationMonitor> desktop_config_monitor);
virtual ~ScreenCapturerMac();
bool Init();
@@ -134,8 +199,11 @@ class ScreenCapturerMac : public ScreenCapturer {
// Overridden from ScreenCapturer:
virtual void Start(Callback* callback) OVERRIDE;
virtual void Capture(const DesktopRegion& region) OVERRIDE;
+ virtual void SetExcludedWindow(WindowId window) OVERRIDE;
virtual void SetMouseShapeObserver(
MouseShapeObserver* mouse_shape_observer) OVERRIDE;
+ virtual bool GetScreenList(ScreenList* screens) OVERRIDE;
+ virtual bool SelectScreen(ScreenId id) OVERRIDE;
private:
void CaptureCursor();
@@ -145,7 +213,8 @@ class ScreenCapturerMac : public ScreenCapturer {
void GlBlitSlow(const DesktopFrame& frame);
void CgBlitPreLion(const DesktopFrame& frame,
const DesktopRegion& region);
- void CgBlitPostLion(const DesktopFrame& frame,
+ // Returns false if the selected screen is no longer valid.
+ bool CgBlitPostLion(const DesktopFrame& frame,
const DesktopRegion& region);
// Called when the screen configuration is changed.
@@ -158,8 +227,6 @@ class ScreenCapturerMac : public ScreenCapturer {
void ScreenUpdateMove(CGScreenUpdateMoveDelta delta,
size_t count,
const CGRect *rect_array);
- void DisplaysReconfigured(CGDirectDisplayID display,
- CGDisplayChangeSummaryFlags flags);
static void ScreenRefreshCallback(CGRectCount count,
const CGRect *rect_array,
void *user_parameter);
@@ -167,12 +234,10 @@ class ScreenCapturerMac : public ScreenCapturer {
size_t count,
const CGRect *rect_array,
void *user_parameter);
- static void DisplaysReconfiguredCallback(CGDirectDisplayID display,
- CGDisplayChangeSummaryFlags flags,
- void *user_parameter);
-
void ReleaseBuffers();
+ DesktopFrame* CreateFrame();
+
Callback* callback_;
MouseShapeObserver* mouse_shape_observer_;
@@ -185,6 +250,16 @@ class ScreenCapturerMac : public ScreenCapturer {
// Current display configuration.
MacDesktopConfiguration desktop_config_;
+ // Currently selected display, or 0 if the full desktop is selected. On OS X
+ // 10.6 and before, this is always 0.
+ CGDirectDisplayID current_display_;
+
+ // The physical pixel bounds of the current screen.
+ DesktopRect screen_pixel_bounds_;
+
+ // The dip to physical pixel scale of the current screen.
+ float dip_to_pixel_scale_;
+
// A thread-safe list of invalid rectangles, and the size of the most
// recently captured screen.
ScreenCapturerHelper helper_;
@@ -195,13 +270,8 @@ class ScreenCapturerMac : public ScreenCapturer {
// Contains an invalid region from the previous capture.
DesktopRegion last_invalid_region_;
- // Used to ensure that frame captures do not take place while displays
- // are being reconfigured.
- scoped_ptr<EventWrapper> display_configuration_capture_event_;
-
- // Records the Ids of attached displays which are being reconfigured.
- // Accessed on the thread on which we are notified of display events.
- std::set<CGDirectDisplayID> reconfiguring_displays_;
+ // Monitoring display reconfiguration.
+ scoped_refptr<DesktopConfigurationMonitor> desktop_config_monitor_;
// Power management assertion to prevent the screen from sleeping.
IOPMAssertionID power_assertion_id_display_;
@@ -217,6 +287,8 @@ class ScreenCapturerMac : public ScreenCapturer {
void* opengl_library_;
CGLSetFullScreenFunc cgl_set_full_screen_;
+ CGWindowID excluded_window_;
+
DISALLOW_COPY_AND_ASSIGN(ScreenCapturerMac);
};
@@ -243,24 +315,14 @@ class InvertedDesktopFrame : public DesktopFrame {
DISALLOW_COPY_AND_ASSIGN(InvertedDesktopFrame);
};
-DesktopFrame* CreateFrame(
- const MacDesktopConfiguration& desktop_config) {
-
- DesktopSize size(desktop_config.pixel_bounds.width(),
- desktop_config.pixel_bounds.height());
- scoped_ptr<DesktopFrame> frame(new BasicDesktopFrame(size));
-
- frame->set_dpi(DesktopVector(
- kStandardDPI * desktop_config.dip_to_pixel_scale,
- kStandardDPI * desktop_config.dip_to_pixel_scale));
- return frame.release();
-}
-
-ScreenCapturerMac::ScreenCapturerMac()
+ScreenCapturerMac::ScreenCapturerMac(
+ scoped_refptr<DesktopConfigurationMonitor> desktop_config_monitor)
: callback_(NULL),
mouse_shape_observer_(NULL),
cgl_context_(NULL),
- display_configuration_capture_event_(EventWrapper::Create()),
+ current_display_(0),
+ dip_to_pixel_scale_(1.0f),
+ desktop_config_monitor_(desktop_config_monitor),
power_assertion_id_display_(kIOPMNullAssertionID),
power_assertion_id_user_(kIOPMNullAssertionID),
app_services_library_(NULL),
@@ -268,8 +330,8 @@ ScreenCapturerMac::ScreenCapturerMac()
cg_display_bytes_per_row_(NULL),
cg_display_bits_per_pixel_(NULL),
opengl_library_(NULL),
- cgl_set_full_screen_(NULL) {
- display_configuration_capture_event_->Set();
+ cgl_set_full_screen_(NULL),
+ excluded_window_(0) {
}
ScreenCapturerMac::~ScreenCapturerMac() {
@@ -284,11 +346,6 @@ ScreenCapturerMac::~ScreenCapturerMac() {
ReleaseBuffers();
UnregisterRefreshAndMoveHandlers();
- CGError err = CGDisplayRemoveReconfigurationCallback(
- ScreenCapturerMac::DisplaysReconfiguredCallback, this);
- if (err != kCGErrorSuccess)
- LOG(LS_ERROR) << "CGDisplayRemoveReconfigurationCallback " << err;
-
dlclose(app_services_library_);
dlclose(opengl_library_);
}
@@ -297,14 +354,9 @@ bool ScreenCapturerMac::Init() {
if (!RegisterRefreshAndMoveHandlers()) {
return false;
}
-
- CGError err = CGDisplayRegisterReconfigurationCallback(
- ScreenCapturerMac::DisplaysReconfiguredCallback, this);
- if (err != kCGErrorSuccess) {
- LOG(LS_ERROR) << "CGDisplayRegisterReconfigurationCallback " << err;
- return false;
- }
-
+ desktop_config_monitor_->Lock();
+ desktop_config_ = desktop_config_monitor_->desktop_configuration();
+ desktop_config_monitor_->Unlock();
ScreenConfigurationChanged();
return true;
}
@@ -343,20 +395,22 @@ void ScreenCapturerMac::Start(Callback* callback) {
&power_assertion_id_user_);
}
-void ScreenCapturerMac::Capture(
- const DesktopRegion& region_to_capture) {
+void ScreenCapturerMac::Capture(const DesktopRegion& region_to_capture) {
TickTime capture_start_time = TickTime::Now();
queue_.MoveToNextFrame();
- // Wait until the display configuration is stable. If one or more displays
- // are reconfiguring then |display_configuration_capture_event_| will not be
- // set until the reconfiguration completes.
- // TODO(wez): Replace this with an early-exit (See crbug.com/104542).
- if (!display_configuration_capture_event_->Wait(
- kDisplayConfigurationEventTimeoutMs)) {
- LOG_F(LS_ERROR) << "Event wait timed out.";
- abort();
+ desktop_config_monitor_->Lock();
+ MacDesktopConfiguration new_config =
+ desktop_config_monitor_->desktop_configuration();
+ if (!desktop_config_.Equals(new_config)) {
+ desktop_config_ = new_config;
+ // If the display configuraiton has changed then refresh capturer data
+ // structures. Occasionally, the refresh and move handlers are lost when
+ // the screen mode changes, so re-register them here.
+ UnregisterRefreshAndMoveHandlers();
+ RegisterRefreshAndMoveHandlers();
+ ScreenConfigurationChanged();
}
DesktopRegion region;
@@ -366,7 +420,7 @@ void ScreenCapturerMac::Capture(
// Note that we can't reallocate other buffers at this point, since the caller
// may still be reading from them.
if (!queue_.current_frame())
- queue_.ReplaceCurrentFrame(CreateFrame(desktop_config_));
+ queue_.ReplaceCurrentFrame(CreateFrame());
DesktopFrame* current_frame = queue_.current_frame();
@@ -374,7 +428,10 @@ void ScreenCapturerMac::Capture(
if (IsOSLionOrLater()) {
// Lion requires us to use their new APIs for doing screen capture. These
// APIS currently crash on 10.6.8 if there is no monitor attached.
- CgBlitPostLion(*current_frame, region);
+ if (!CgBlitPostLion(*current_frame, region)) {
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }
} else if (cgl_context_) {
flip = true;
if (pixel_buffer_object_.get() != 0) {
@@ -398,7 +455,7 @@ void ScreenCapturerMac::Capture(
// Signal that we are done capturing data from the display framebuffer,
// and accessing display structures.
- display_configuration_capture_event_->Set();
+ desktop_config_monitor_->Unlock();
// Capture the current cursor shape and notify |callback_| if it has changed.
CaptureCursor();
@@ -408,6 +465,10 @@ void ScreenCapturerMac::Capture(
callback_->OnCaptureCompleted(new_frame);
}
+void ScreenCapturerMac::SetExcludedWindow(WindowId window) {
+ excluded_window_ = window;
+}
+
void ScreenCapturerMac::SetMouseShapeObserver(
MouseShapeObserver* mouse_shape_observer) {
assert(!mouse_shape_observer_);
@@ -415,6 +476,47 @@ void ScreenCapturerMac::SetMouseShapeObserver(
mouse_shape_observer_ = mouse_shape_observer;
}
+bool ScreenCapturerMac::GetScreenList(ScreenList* screens) {
+ assert(screens->size() == 0);
+ if (!IsOSLionOrLater()) {
+ // Single monitor cast is not supported on pre OS X 10.7.
+ Screen screen;
+ screen.id = kFullDesktopScreenId;
+ screens->push_back(screen);
+ return true;
+ }
+
+ for (MacDisplayConfigurations::iterator it = desktop_config_.displays.begin();
+ it != desktop_config_.displays.end(); ++it) {
+ Screen screen;
+ screen.id = static_cast<ScreenId>(it->id);
+ screens->push_back(screen);
+ }
+ return true;
+}
+
+bool ScreenCapturerMac::SelectScreen(ScreenId id) {
+ if (!IsOSLionOrLater()) {
+ // Ignore the screen selection on unsupported OS.
+ assert(!current_display_);
+ return id == kFullDesktopScreenId;
+ }
+
+ if (id == kFullDesktopScreenId) {
+ current_display_ = 0;
+ } else {
+ const MacDisplayConfiguration* config =
+ desktop_config_.FindDisplayConfigurationById(
+ static_cast<CGDirectDisplayID>(id));
+ if (!config)
+ return false;
+ current_display_ = config->id;
+ }
+
+ ScreenConfigurationChanged();
+ return true;
+}
+
void ScreenCapturerMac::CaptureCursor() {
if (!mouse_shape_observer_)
return;
@@ -608,7 +710,7 @@ void ScreenCapturerMac::CgBlitPreLion(const DesktopFrame& frame,
}
}
-void ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame,
+bool ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame,
const DesktopRegion& region) {
// Copy the entire contents of the previous capture buffer, to capture over.
// TODO(wez): Get rid of this as per crbug.com/145064, or implement
@@ -619,13 +721,40 @@ void ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame,
frame.stride() * frame.size().height());
}
- for (size_t i = 0; i < desktop_config_.displays.size(); ++i) {
- const MacDisplayConfiguration& display_config = desktop_config_.displays[i];
+ MacDisplayConfigurations displays_to_capture;
+ if (current_display_) {
+ // Capturing a single screen. Note that the screen id may change when
+ // screens are added or removed.
+ const MacDisplayConfiguration* config =
+ desktop_config_.FindDisplayConfigurationById(current_display_);
+ if (config) {
+ displays_to_capture.push_back(*config);
+ } else {
+ LOG(LS_ERROR) << "The selected screen cannot be found for capturing.";
+ return false;
+ }
+ } else {
+ // Capturing the whole desktop.
+ displays_to_capture = desktop_config_.displays;
+ }
+ // Create the window list once for all displays.
+ CFArrayRef window_list = CreateWindowListWithExclusion(excluded_window_);
+
+ for (size_t i = 0; i < displays_to_capture.size(); ++i) {
+ const MacDisplayConfiguration& display_config = displays_to_capture[i];
+
+ // Capturing mixed-DPI on one surface is hard, so we only return displays
+ // that match the "primary" display's DPI. The primary display is always
+ // the first in the list.
+ if (i > 0 && display_config.dip_to_pixel_scale !=
+ displays_to_capture[0].dip_to_pixel_scale) {
+ continue;
+ }
// Determine the display's position relative to the desktop, in pixels.
DesktopRect display_bounds = display_config.pixel_bounds;
- display_bounds.Translate(-desktop_config_.pixel_bounds.left(),
- -desktop_config_.pixel_bounds.top());
+ display_bounds.Translate(-screen_pixel_bounds_.left(),
+ -screen_pixel_bounds_.top());
// Determine which parts of the blit region, if any, lay within the monitor.
DesktopRegion copy_region = region;
@@ -636,6 +765,26 @@ void ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame,
// Translate the region to be copied into display-relative coordinates.
copy_region.Translate(-display_bounds.left(), -display_bounds.top());
+ DesktopRect excluded_window_bounds;
+ CGImageRef excluded_image = NULL;
+ CFDataRef excluded_window_region_data = NULL;
+ if (excluded_window_ && window_list) {
+ // Get the region of the excluded window relative the primary display.
+ excluded_window_bounds = GetExcludedWindowPixelBounds(
+ excluded_window_, display_config.dip_to_pixel_scale);
+ excluded_window_bounds.IntersectWith(display_config.pixel_bounds);
+
+ // Create the image under the excluded window first, because it's faster
+ // than captuing the whole display.
+ if (!excluded_window_bounds.is_empty()) {
+ excluded_image = CreateExcludedWindowRegionImage(
+ excluded_window_bounds,
+ display_config.dip_to_pixel_scale,
+ window_list,
+ &excluded_window_region_data);
+ }
+ }
+
// Create an image containing a snapshot of the display.
CGImageRef image = CGDisplayCreateImage(display_config.id);
if (image == NULL)
@@ -665,26 +814,58 @@ void ScreenCapturerMac::CgBlitPostLion(const DesktopFrame& frame,
i.rect());
}
+ // Copy the region of the excluded window to the frame.
+ if (excluded_image) {
+ assert(excluded_window_region_data);
+ display_base_address = CFDataGetBytePtr(excluded_window_region_data);
+ src_bytes_per_row = CGImageGetBytesPerRow(excluded_image);
+
+ // Translate the bounds relative to the desktop, because |frame| data
+ // starts from the desktop top-left corner.
+ DesktopRect window_bounds_relative_to_desktop(excluded_window_bounds);
+ window_bounds_relative_to_desktop.Translate(
+ -screen_pixel_bounds_.left(), -screen_pixel_bounds_.top());
+ out_ptr = frame.data() +
+ (window_bounds_relative_to_desktop.left() * src_bytes_per_pixel) +
+ (window_bounds_relative_to_desktop.top() * frame.stride());
+
+ CopyRect(display_base_address,
+ src_bytes_per_row,
+ out_ptr,
+ frame.stride(),
+ src_bytes_per_pixel,
+ DesktopRect::MakeSize(excluded_window_bounds.size()));
+ CFRelease(excluded_window_region_data);
+ CFRelease(excluded_image);
+ }
+
CFRelease(data);
CFRelease(image);
}
+ if (window_list)
+ CFRelease(window_list);
+ return true;
}
void ScreenCapturerMac::ScreenConfigurationChanged() {
+ if (current_display_) {
+ const MacDisplayConfiguration* config =
+ desktop_config_.FindDisplayConfigurationById(current_display_);
+ screen_pixel_bounds_ = config ? config->pixel_bounds : DesktopRect();
+ dip_to_pixel_scale_ = config ? config->dip_to_pixel_scale : 1.0f;
+ } else {
+ screen_pixel_bounds_ = desktop_config_.pixel_bounds;
+ dip_to_pixel_scale_ = desktop_config_.dip_to_pixel_scale;
+ }
+
// Release existing buffers, which will be of the wrong size.
ReleaseBuffers();
// Clear the dirty region, in case the display is down-sizing.
helper_.ClearInvalidRegion();
- // Refresh the cached desktop configuration.
- desktop_config_ = MacDesktopConfiguration::GetCurrent(
- MacDesktopConfiguration::TopLeftOrigin);
-
// Re-mark the entire desktop as dirty.
- helper_.InvalidateScreen(
- DesktopSize(desktop_config_.pixel_bounds.width(),
- desktop_config_.pixel_bounds.height()));
+ helper_.InvalidateScreen(screen_pixel_bounds_.size());
// Make sure the frame buffers will be reallocated.
queue_.Reset();
@@ -765,8 +946,8 @@ void ScreenCapturerMac::ScreenConfigurationChanged() {
(*cgl_set_full_screen_)(cgl_context_);
CGLSetCurrentContext(cgl_context_);
- size_t buffer_size = desktop_config_.pixel_bounds.width() *
- desktop_config_.pixel_bounds.height() *
+ size_t buffer_size = screen_pixel_bounds_.width() *
+ screen_pixel_bounds_.height() *
sizeof(uint32_t);
pixel_buffer_object_.Init(cgl_context_, buffer_size);
}
@@ -798,20 +979,17 @@ void ScreenCapturerMac::UnregisterRefreshAndMoveHandlers() {
void ScreenCapturerMac::ScreenRefresh(CGRectCount count,
const CGRect* rect_array) {
- if (desktop_config_.pixel_bounds.is_empty())
+ if (screen_pixel_bounds_.is_empty())
return;
DesktopRegion region;
-
+ DesktopVector translate_vector =
+ DesktopVector().subtract(screen_pixel_bounds_.top_left());
for (CGRectCount i = 0; i < count; ++i) {
// Convert from Density-Independent Pixel to physical pixel coordinates.
- DesktopRect rect =
- ScaleAndRoundCGRect(rect_array[i], desktop_config_.dip_to_pixel_scale);
-
+ DesktopRect rect = ScaleAndRoundCGRect(rect_array[i], dip_to_pixel_scale_);
// Translate from local desktop to capturer framebuffer coordinates.
- rect.Translate(-desktop_config_.pixel_bounds.left(),
- -desktop_config_.pixel_bounds.top());
-
+ rect.Translate(translate_vector);
region.AddRect(rect);
}
@@ -831,45 +1009,12 @@ void ScreenCapturerMac::ScreenUpdateMove(CGScreenUpdateMoveDelta delta,
ScreenRefresh(count, refresh_rects);
}
-void ScreenCapturerMac::DisplaysReconfigured(
- CGDirectDisplayID display,
- CGDisplayChangeSummaryFlags flags) {
- if (flags & kCGDisplayBeginConfigurationFlag) {
- if (reconfiguring_displays_.empty()) {
- // If this is the first display to start reconfiguring then wait on
- // |display_configuration_capture_event_| to block the capture thread
- // from accessing display memory until the reconfiguration completes.
- if (!display_configuration_capture_event_->Wait(
- kDisplayConfigurationEventTimeoutMs)) {
- LOG_F(LS_ERROR) << "Event wait timed out.";
- abort();
- }
- }
-
- reconfiguring_displays_.insert(display);
- } else {
- reconfiguring_displays_.erase(display);
-
- if (reconfiguring_displays_.empty()) {
- // If no other displays are reconfiguring then refresh capturer data
- // structures and un-block the capturer thread. Occasionally, the
- // refresh and move handlers are lost when the screen mode changes,
- // so re-register them here (the same does not appear to be true for
- // the reconfiguration handler itself).
- UnregisterRefreshAndMoveHandlers();
- RegisterRefreshAndMoveHandlers();
- ScreenConfigurationChanged();
- display_configuration_capture_event_->Set();
- }
- }
-}
-
void ScreenCapturerMac::ScreenRefreshCallback(CGRectCount count,
const CGRect* rect_array,
void* user_parameter) {
ScreenCapturerMac* capturer =
reinterpret_cast<ScreenCapturerMac*>(user_parameter);
- if (capturer->desktop_config_.pixel_bounds.is_empty())
+ if (capturer->screen_pixel_bounds_.is_empty())
capturer->ScreenConfigurationChanged();
capturer->ScreenRefresh(count, rect_array);
}
@@ -884,20 +1029,24 @@ void ScreenCapturerMac::ScreenUpdateMoveCallback(
capturer->ScreenUpdateMove(delta, count, rect_array);
}
-void ScreenCapturerMac::DisplaysReconfiguredCallback(
- CGDirectDisplayID display,
- CGDisplayChangeSummaryFlags flags,
- void* user_parameter) {
- ScreenCapturerMac* capturer =
- reinterpret_cast<ScreenCapturerMac*>(user_parameter);
- capturer->DisplaysReconfigured(display, flags);
+DesktopFrame* ScreenCapturerMac::CreateFrame() {
+ scoped_ptr<DesktopFrame> frame(
+ new BasicDesktopFrame(screen_pixel_bounds_.size()));
+
+ frame->set_dpi(DesktopVector(kStandardDPI * dip_to_pixel_scale_,
+ kStandardDPI * dip_to_pixel_scale_));
+ return frame.release();
}
} // namespace
// static
ScreenCapturer* ScreenCapturer::Create(const DesktopCaptureOptions& options) {
- scoped_ptr<ScreenCapturerMac> capturer(new ScreenCapturerMac());
+ if (!options.configuration_monitor())
+ return NULL;
+
+ scoped_ptr<ScreenCapturerMac> capturer(
+ new ScreenCapturerMac(options.configuration_monitor()));
if (!capturer->Init())
capturer.reset();
return capturer.release();
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
index 17673b5cc0e..aa0e808eb69 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_mock_objects.h
@@ -26,6 +26,8 @@ class MockScreenCapturer : public ScreenCapturer {
MOCK_METHOD1(Capture, void(const DesktopRegion& region));
MOCK_METHOD1(SetMouseShapeObserver, void(
MouseShapeObserver* mouse_shape_observer));
+ MOCK_METHOD1(GetScreenList, bool(ScreenList* screens));
+ MOCK_METHOD1(SelectScreen, bool(ScreenId id));
private:
DISALLOW_COPY_AND_ASSIGN(MockScreenCapturer);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
index b4ae128085a..50ff7a2853a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_unittest.cc
@@ -59,6 +59,15 @@ SharedMemory* ScreenCapturerTest::CreateSharedMemory(size_t size) {
return new FakeSharedMemory(new char[size], size);
}
+TEST_F(ScreenCapturerTest, GetScreenListAndSelectScreen) {
+ webrtc::ScreenCapturer::ScreenList screens;
+ EXPECT_TRUE(capturer_->GetScreenList(&screens));
+ for(webrtc::ScreenCapturer::ScreenList::iterator it = screens.begin();
+ it != screens.end(); ++it) {
+ EXPECT_TRUE(capturer_->SelectScreen(it->id));
+ }
+}
+
TEST_F(ScreenCapturerTest, StartCapturer) {
capturer_->SetMouseShapeObserver(&mouse_observer_);
capturer_->Start(&callback_);
@@ -97,7 +106,7 @@ TEST_F(ScreenCapturerTest, Capture) {
delete frame;
}
-#if defined(OS_WIN)
+#if defined(WEBRTC_WIN)
TEST_F(ScreenCapturerTest, UseSharedBuffers) {
DesktopFrame* frame = NULL;
@@ -120,6 +129,20 @@ TEST_F(ScreenCapturerTest, UseSharedBuffers) {
delete frame;
}
-#endif // defined(OS_WIN)
+TEST_F(ScreenCapturerTest, UseMagnifier) {
+ DesktopCaptureOptions options(DesktopCaptureOptions::CreateDefault());
+ options.set_allow_use_magnification_api(true);
+ capturer_.reset(ScreenCapturer::Create(options));
+
+ DesktopFrame* frame = NULL;
+ EXPECT_CALL(callback_, OnCaptureCompleted(_)).WillOnce(SaveArg<0>(&frame));
+
+ capturer_->Start(&callback_);
+ capturer_->Capture(DesktopRegion());
+ ASSERT_TRUE(frame);
+ delete frame;
+}
+
+#endif // defined(WEBRTC_WIN)
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_win.cc
index a9bcd48f9c1..5950795d470 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_win.cc
@@ -10,364 +10,20 @@
#include "webrtc/modules/desktop_capture/screen_capturer.h"
-#include <windows.h>
-
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
-#include "webrtc/modules/desktop_capture/desktop_frame.h"
-#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
-#include "webrtc/modules/desktop_capture/desktop_region.h"
-#include "webrtc/modules/desktop_capture/differ.h"
-#include "webrtc/modules/desktop_capture/mouse_cursor.h"
-#include "webrtc/modules/desktop_capture/mouse_cursor_shape.h"
-#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
-#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
-#include "webrtc/modules/desktop_capture/win/cursor.h"
-#include "webrtc/modules/desktop_capture/win/desktop.h"
-#include "webrtc/modules/desktop_capture/win/scoped_thread_desktop.h"
-#include "webrtc/system_wrappers/interface/logging.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/system_wrappers/interface/tick_util.h"
+#include "webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h"
+#include "webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h"
namespace webrtc {
-namespace {
-
-// Constants from dwmapi.h.
-const UINT DWM_EC_DISABLECOMPOSITION = 0;
-const UINT DWM_EC_ENABLECOMPOSITION = 1;
-
-typedef HRESULT (WINAPI * DwmEnableCompositionFunc)(UINT);
-
-const wchar_t kDwmapiLibraryName[] = L"dwmapi.dll";
-
-// ScreenCapturerWin captures 32bit RGB using GDI.
-//
-// ScreenCapturerWin is double-buffered as required by ScreenCapturer.
-class ScreenCapturerWin : public ScreenCapturer {
- public:
- ScreenCapturerWin(const DesktopCaptureOptions& options);
- virtual ~ScreenCapturerWin();
-
- // Overridden from ScreenCapturer:
- virtual void Start(Callback* callback) OVERRIDE;
- virtual void Capture(const DesktopRegion& region) OVERRIDE;
- virtual void SetMouseShapeObserver(
- MouseShapeObserver* mouse_shape_observer) OVERRIDE;
-
- private:
- // Make sure that the device contexts match the screen configuration.
- void PrepareCaptureResources();
-
- // Captures the current screen contents into the current buffer.
- void CaptureImage();
-
- // Capture the current cursor shape.
- void CaptureCursor();
-
- Callback* callback_;
- MouseShapeObserver* mouse_shape_observer_;
-
- // A thread-safe list of invalid rectangles, and the size of the most
- // recently captured screen.
- ScreenCapturerHelper helper_;
-
- // Snapshot of the last cursor bitmap we sent to the client. This is used
- // to diff against the current cursor so we only send a cursor-change
- // message when the shape has changed.
- MouseCursorShape last_cursor_;
-
- ScopedThreadDesktop desktop_;
-
- // GDI resources used for screen capture.
- HDC desktop_dc_;
- HDC memory_dc_;
-
- // Queue of the frames buffers.
- ScreenCaptureFrameQueue queue_;
-
- // Rectangle describing the bounds of the desktop device context.
- DesktopRect desktop_dc_rect_;
-
- // Class to calculate the difference between two screen bitmaps.
- scoped_ptr<Differ> differ_;
-
- HMODULE dwmapi_library_;
- DwmEnableCompositionFunc composition_func_;
-
- // Used to suppress duplicate logging of SetThreadExecutionState errors.
- bool set_thread_execution_state_failed_;
-
- DISALLOW_COPY_AND_ASSIGN(ScreenCapturerWin);
-};
-
-ScreenCapturerWin::ScreenCapturerWin(const DesktopCaptureOptions& options)
- : callback_(NULL),
- mouse_shape_observer_(NULL),
- desktop_dc_(NULL),
- memory_dc_(NULL),
- dwmapi_library_(NULL),
- composition_func_(NULL),
- set_thread_execution_state_failed_(false) {
- if (options.disable_effects()) {
- // Load dwmapi.dll dynamically since it is not available on XP.
- if (!dwmapi_library_)
- dwmapi_library_ = LoadLibrary(kDwmapiLibraryName);
-
- if (dwmapi_library_) {
- composition_func_ = reinterpret_cast<DwmEnableCompositionFunc>(
- GetProcAddress(dwmapi_library_, "DwmEnableComposition"));
- }
- }
-}
-
-ScreenCapturerWin::~ScreenCapturerWin() {
- if (desktop_dc_)
- ReleaseDC(NULL, desktop_dc_);
- if (memory_dc_)
- DeleteDC(memory_dc_);
-
- // Restore Aero.
- if (composition_func_)
- (*composition_func_)(DWM_EC_ENABLECOMPOSITION);
-
- if (dwmapi_library_)
- FreeLibrary(dwmapi_library_);
-}
-
-void ScreenCapturerWin::Capture(const DesktopRegion& region) {
- TickTime capture_start_time = TickTime::Now();
-
- queue_.MoveToNextFrame();
-
- // Request that the system not power-down the system, or the display hardware.
- if (!SetThreadExecutionState(ES_DISPLAY_REQUIRED | ES_SYSTEM_REQUIRED)) {
- if (!set_thread_execution_state_failed_) {
- set_thread_execution_state_failed_ = true;
- LOG_F(LS_WARNING) << "Failed to make system & display power assertion: "
- << GetLastError();
- }
- }
-
- // Make sure the GDI capture resources are up-to-date.
- PrepareCaptureResources();
-
- // Copy screen bits to the current buffer.
- CaptureImage();
-
- const DesktopFrame* current_frame = queue_.current_frame();
- const DesktopFrame* last_frame = queue_.previous_frame();
- if (last_frame) {
- // Make sure the differencer is set up correctly for these previous and
- // current screens.
- if (!differ_.get() ||
- (differ_->width() != current_frame->size().width()) ||
- (differ_->height() != current_frame->size().height()) ||
- (differ_->bytes_per_row() != current_frame->stride())) {
- differ_.reset(new Differ(current_frame->size().width(),
- current_frame->size().height(),
- DesktopFrame::kBytesPerPixel,
- current_frame->stride()));
- }
-
- // Calculate difference between the two last captured frames.
- DesktopRegion region;
- differ_->CalcDirtyRegion(last_frame->data(), current_frame->data(),
- &region);
- helper_.InvalidateRegion(region);
- } else {
- // No previous frame is available. Invalidate the whole screen.
- helper_.InvalidateScreen(current_frame->size());
- }
-
- helper_.set_size_most_recent(current_frame->size());
-
- // Emit the current frame.
- DesktopFrame* frame = queue_.current_frame()->Share();
- frame->set_dpi(DesktopVector(
- GetDeviceCaps(desktop_dc_, LOGPIXELSX),
- GetDeviceCaps(desktop_dc_, LOGPIXELSY)));
- frame->mutable_updated_region()->Clear();
- helper_.TakeInvalidRegion(frame->mutable_updated_region());
- frame->set_capture_time_ms(
- (TickTime::Now() - capture_start_time).Milliseconds());
- callback_->OnCaptureCompleted(frame);
-
- // Check for cursor shape update.
- CaptureCursor();
-}
-
-void ScreenCapturerWin::SetMouseShapeObserver(
- MouseShapeObserver* mouse_shape_observer) {
- assert(!mouse_shape_observer_);
- assert(mouse_shape_observer);
-
- mouse_shape_observer_ = mouse_shape_observer;
-}
-
-void ScreenCapturerWin::Start(Callback* callback) {
- assert(!callback_);
- assert(callback);
-
- callback_ = callback;
-
- // Vote to disable Aero composited desktop effects while capturing. Windows
- // will restore Aero automatically if the process exits. This has no effect
- // under Windows 8 or higher. See crbug.com/124018.
- if (composition_func_)
- (*composition_func_)(DWM_EC_DISABLECOMPOSITION);
-}
-
-void ScreenCapturerWin::PrepareCaptureResources() {
- // Switch to the desktop receiving user input if different from the current
- // one.
- scoped_ptr<Desktop> input_desktop(Desktop::GetInputDesktop());
- if (input_desktop.get() != NULL && !desktop_.IsSame(*input_desktop)) {
- // Release GDI resources otherwise SetThreadDesktop will fail.
- if (desktop_dc_) {
- ReleaseDC(NULL, desktop_dc_);
- desktop_dc_ = NULL;
- }
-
- if (memory_dc_) {
- DeleteDC(memory_dc_);
- memory_dc_ = NULL;
- }
-
- // If SetThreadDesktop() fails, the thread is still assigned a desktop.
- // So we can continue capture screen bits, just from the wrong desktop.
- desktop_.SetThreadDesktop(input_desktop.release());
-
- // Re-assert our vote to disable Aero.
- // See crbug.com/124018 and crbug.com/129906.
- if (composition_func_ != NULL) {
- (*composition_func_)(DWM_EC_DISABLECOMPOSITION);
- }
- }
-
- // If the display bounds have changed then recreate GDI resources.
- // TODO(wez): Also check for pixel format changes.
- DesktopRect screen_rect(DesktopRect::MakeXYWH(
- GetSystemMetrics(SM_XVIRTUALSCREEN),
- GetSystemMetrics(SM_YVIRTUALSCREEN),
- GetSystemMetrics(SM_CXVIRTUALSCREEN),
- GetSystemMetrics(SM_CYVIRTUALSCREEN)));
- if (!screen_rect.equals(desktop_dc_rect_)) {
- if (desktop_dc_) {
- ReleaseDC(NULL, desktop_dc_);
- desktop_dc_ = NULL;
- }
- if (memory_dc_) {
- DeleteDC(memory_dc_);
- memory_dc_ = NULL;
- }
- desktop_dc_rect_ = DesktopRect();
- }
-
- if (desktop_dc_ == NULL) {
- assert(memory_dc_ == NULL);
-
- // Create GDI device contexts to capture from the desktop into memory.
- desktop_dc_ = GetDC(NULL);
- if (!desktop_dc_)
- abort();
- memory_dc_ = CreateCompatibleDC(desktop_dc_);
- if (!memory_dc_)
- abort();
- desktop_dc_rect_ = screen_rect;
-
- // Make sure the frame buffers will be reallocated.
- queue_.Reset();
-
- helper_.ClearInvalidRegion();
- }
-}
-
-void ScreenCapturerWin::CaptureImage() {
- // If the current buffer is from an older generation then allocate a new one.
- // Note that we can't reallocate other buffers at this point, since the caller
- // may still be reading from them.
- if (!queue_.current_frame()) {
- assert(desktop_dc_ != NULL);
- assert(memory_dc_ != NULL);
-
- DesktopSize size = DesktopSize(
- desktop_dc_rect_.width(), desktop_dc_rect_.height());
-
- size_t buffer_size = size.width() * size.height() *
- DesktopFrame::kBytesPerPixel;
- SharedMemory* shared_memory =
- callback_->CreateSharedMemory(buffer_size);
- scoped_ptr<DesktopFrameWin> buffer(
- DesktopFrameWin::Create(size, shared_memory, desktop_dc_));
- queue_.ReplaceCurrentFrame(buffer.release());
- }
-
- // Select the target bitmap into the memory dc and copy the rect from desktop
- // to memory.
- DesktopFrameWin* current = static_cast<DesktopFrameWin*>(
- queue_.current_frame()->GetUnderlyingFrame());
- HGDIOBJ previous_object = SelectObject(memory_dc_, current->bitmap());
- if (previous_object != NULL) {
- BitBlt(memory_dc_,
- 0, 0, desktop_dc_rect_.width(), desktop_dc_rect_.height(),
- desktop_dc_,
- desktop_dc_rect_.left(), desktop_dc_rect_.top(),
- SRCCOPY | CAPTUREBLT);
-
- // Select back the previously selected object to that the device contect
- // could be destroyed independently of the bitmap if needed.
- SelectObject(memory_dc_, previous_object);
- }
-}
-
-void ScreenCapturerWin::CaptureCursor() {
- CURSORINFO cursor_info;
- cursor_info.cbSize = sizeof(CURSORINFO);
- if (!GetCursorInfo(&cursor_info)) {
- LOG_F(LS_ERROR) << "Unable to get cursor info. Error = " << GetLastError();
- return;
- }
-
- // Note that |cursor_info.hCursor| does not need to be freed.
- scoped_ptr<MouseCursor> cursor_image(
- CreateMouseCursorFromHCursor(desktop_dc_, cursor_info.hCursor));
- if (!cursor_image.get())
- return;
-
- scoped_ptr<MouseCursorShape> cursor(new MouseCursorShape);
- cursor->hotspot = cursor_image->hotspot();
- cursor->size = cursor_image->image().size();
- uint8_t* current_row = cursor_image->image().data();
- for (int y = 0; y < cursor_image->image().size().height(); ++y) {
- cursor->data.append(current_row,
- current_row + cursor_image->image().size().width() *
- DesktopFrame::kBytesPerPixel);
- current_row += cursor_image->image().stride();
- }
-
- // Compare the current cursor with the last one we sent to the client. If
- // they're the same, then don't bother sending the cursor again.
- if (last_cursor_.size.equals(cursor->size) &&
- last_cursor_.hotspot.equals(cursor->hotspot) &&
- last_cursor_.data == cursor->data) {
- return;
- }
-
- LOG(LS_VERBOSE) << "Sending updated cursor: " << cursor->size.width() << "x"
- << cursor->size.height();
-
- // Record the last cursor image that we sent to the client.
- last_cursor_ = *cursor;
-
- if (mouse_shape_observer_)
- mouse_shape_observer_->OnCursorShapeChanged(cursor.release());
-}
-
-} // namespace
-
// static
ScreenCapturer* ScreenCapturer::Create(const DesktopCaptureOptions& options) {
- return new ScreenCapturerWin(options);
+ scoped_ptr<ScreenCapturer> gdi_capturer(new ScreenCapturerWinGdi(options));
+
+ if (options.allow_use_magnification_api())
+ return new ScreenCapturerWinMagnifier(gdi_capturer.Pass());
+
+ return gdi_capturer.release();
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
index c5a4c8cb17a..4d07d98c628 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/screen_capturer_x11.cc
@@ -57,6 +57,8 @@ class ScreenCapturerLinux : public ScreenCapturer,
// ScreenCapturer interface.
virtual void SetMouseShapeObserver(
MouseShapeObserver* mouse_shape_observer) OVERRIDE;
+ virtual bool GetScreenList(ScreenList* screens) OVERRIDE;
+ virtual bool SelectScreen(ScreenId id) OVERRIDE;
private:
Display* display() { return options_.x_display()->display(); }
@@ -310,6 +312,20 @@ void ScreenCapturerLinux::SetMouseShapeObserver(
mouse_shape_observer_ = mouse_shape_observer;
}
+bool ScreenCapturerLinux::GetScreenList(ScreenList* screens) {
+ DCHECK(screens->size() == 0);
+ // TODO(jiayl): implement screen enumeration.
+ Screen default_screen;
+ default_screen.id = 0;
+ screens->push_back(default_screen);
+ return true;
+}
+
+bool ScreenCapturerLinux::SelectScreen(ScreenId id) {
+ // TODO(jiayl): implement screen selection.
+ return true;
+}
+
bool ScreenCapturerLinux::HandleXEvent(const XEvent& event) {
if (use_damage_ && (event.type == damage_event_base_ + XDamageNotify)) {
const XDamageNotifyEvent* damage_event =
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h b/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
index bb43b28b1c7..7870d833f15 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/shared_memory.h
@@ -17,8 +17,8 @@
#include <windows.h>
#endif
+#include "webrtc/base/constructormagic.h"
#include "webrtc/typedefs.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/cursor.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/cursor.cc
index 11bb2dbb6d0..00055c44ad8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/cursor.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/cursor.cc
@@ -137,7 +137,7 @@ MouseCursor* CreateMouseCursorFromHCursor(HDC dc, HCURSOR cursor) {
int width = bitmap_info.bmWidth;
int height = bitmap_info.bmHeight;
- scoped_array<uint32_t> mask_data(new uint32_t[width * height]);
+ scoped_ptr<uint32_t[]> mask_data(new uint32_t[width * height]);
// Get pixel data from |scoped_mask| converting it to 32bpp along the way.
// GetDIBits() sets the alpha component of every pixel to 0.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/cursor_unittest.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/cursor_unittest.cc
index f590bd255ce..b046ace315a 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/cursor_unittest.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/cursor_unittest.cc
@@ -58,15 +58,15 @@ bool ConvertToMouseShapeAndCompare(unsigned left, unsigned right) {
int width = bitmap_info.bmWidth;
int height = bitmap_info.bmHeight;
- EXPECT_TRUE(DesktopSize(width, height).equals(mouse_shape->image().size()));
+ EXPECT_TRUE(DesktopSize(width, height).equals(mouse_shape->image()->size()));
// Get the pixels from |scoped_color|.
int size = width * height;
- scoped_array<uint32_t> data(new uint32_t[size]);
+ scoped_ptr<uint32_t[]> data(new uint32_t[size]);
EXPECT_TRUE(GetBitmapBits(scoped_color, size * sizeof(uint32_t), data.get()));
// Compare the 32bpp image in |mouse_shape| with the one loaded from |right|.
- return memcmp(data.get(), mouse_shape->image().data(),
+ return memcmp(data.get(), mouse_shape->image()->data(),
size * sizeof(uint32_t)) == 0;
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/desktop.h b/chromium/third_party/webrtc/modules/desktop_capture/win/desktop.h
index bdc490c7286..fda56ca8d81 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/desktop.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/desktop.h
@@ -11,10 +11,10 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_DESKTOP_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_DESKTOP_H_
-#include <string>
#include <windows.h>
+#include <string>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_gdi_object.h b/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_gdi_object.h
index 0ca35c526ab..366df6d4ff0 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_gdi_object.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_gdi_object.h
@@ -13,7 +13,7 @@
#include <windows.h>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_thread_desktop.h b/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_thread_desktop.h
index 39514237ea7..f12731d975e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_thread_desktop.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/scoped_thread_desktop.h
@@ -13,7 +13,7 @@
#include <windows.h>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.cc
new file mode 100644
index 00000000000..1b335452779
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.cc
@@ -0,0 +1,92 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"
+
+#include <assert.h>
+#include <windows.h>
+
+namespace webrtc {
+
+bool GetScreenList(ScreenCapturer::ScreenList* screens) {
+ assert(screens->size() == 0);
+
+ BOOL enum_result = TRUE;
+ for (int device_index = 0;; ++device_index) {
+ DISPLAY_DEVICE device;
+ device.cb = sizeof(device);
+ enum_result = EnumDisplayDevices(NULL, device_index, &device, 0);
+
+ // |enum_result| is 0 if we have enumerated all devices.
+ if (!enum_result)
+ break;
+
+ // We only care about active displays.
+ if (!(device.StateFlags & DISPLAY_DEVICE_ACTIVE))
+ continue;
+
+ ScreenCapturer::Screen screen;
+ screen.id = device_index;
+ screens->push_back(screen);
+ }
+ return true;
+}
+
+bool IsScreenValid(ScreenId screen, std::wstring* device_key) {
+ if (screen == kFullDesktopScreenId) {
+ *device_key = L"";
+ return true;
+ }
+
+ DISPLAY_DEVICE device;
+ device.cb = sizeof(device);
+ BOOL enum_result = EnumDisplayDevices(NULL, screen, &device, 0);
+ if (enum_result)
+ *device_key = device.DeviceKey;
+
+ return !!enum_result;
+}
+
+DesktopRect GetScreenRect(ScreenId screen, const std::wstring& device_key) {
+ if (screen == kFullDesktopScreenId) {
+ return DesktopRect::MakeXYWH(GetSystemMetrics(SM_XVIRTUALSCREEN),
+ GetSystemMetrics(SM_YVIRTUALSCREEN),
+ GetSystemMetrics(SM_CXVIRTUALSCREEN),
+ GetSystemMetrics(SM_CYVIRTUALSCREEN));
+ }
+
+ DISPLAY_DEVICE device;
+ device.cb = sizeof(device);
+ BOOL result = EnumDisplayDevices(NULL, screen, &device, 0);
+ if (!result)
+ return DesktopRect();
+
+ // Verifies the device index still maps to the same display device, to make
+ // sure we are capturing the same device when devices are added or removed.
+ // DeviceKey is documented as reserved, but it actually contains the registry
+ // key for the device and is unique for each monitor, while DeviceID is not.
+ if (device_key != device.DeviceKey)
+ return DesktopRect();
+
+ DEVMODE device_mode;
+ device_mode.dmSize = sizeof(device_mode);
+ device_mode.dmDriverExtra = 0;
+ result = EnumDisplaySettingsEx(
+ device.DeviceName, ENUM_CURRENT_SETTINGS, &device_mode, 0);
+ if (!result)
+ return DesktopRect();
+
+ return DesktopRect::MakeXYWH(device_mode.dmPosition.x,
+ device_mode.dmPosition.y,
+ device_mode.dmPelsWidth,
+ device_mode.dmPelsHeight);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.h b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.h
new file mode 100644
index 00000000000..42473e047b3
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capture_utils.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURE_UTILS_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURE_UTILS_H_
+
+#include "webrtc/modules/desktop_capture/screen_capturer.h"
+
+namespace webrtc {
+
+// Output the list of active screens into |screens|. Returns true if succeeded,
+// or false if it fails to enumerate the display devices.
+bool GetScreenList(ScreenCapturer::ScreenList* screens);
+
+// Returns true if |screen| is a valid screen. The screen device key is
+// returned through |device_key| if the screen is valid. The device key can be
+// used in GetScreenRect to verify the screen matches the previously obtained
+// id.
+bool IsScreenValid(ScreenId screen, std::wstring* device_key);
+
+// Get the rect of the screen identified by |screen|, relative to the primary
+// display's top-left. If the screen device key does not match |device_key|, or
+// the screen does not exist, or any error happens, an empty rect is returned.
+DesktopRect GetScreenRect(ScreenId screen, const std::wstring& device_key);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURE_UTILS_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc
new file mode 100644
index 00000000000..9cb3681fd2c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc
@@ -0,0 +1,324 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h"
+
+#include <assert.h>
+
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
+#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
+#include "webrtc/modules/desktop_capture/desktop_region.h"
+#include "webrtc/modules/desktop_capture/differ.h"
+#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+#include "webrtc/modules/desktop_capture/win/cursor.h"
+#include "webrtc/modules/desktop_capture/win/desktop.h"
+#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+#include "webrtc/system_wrappers/interface/tick_util.h"
+
+namespace webrtc {
+
+namespace {
+
+// Constants from dwmapi.h.
+const UINT DWM_EC_DISABLECOMPOSITION = 0;
+const UINT DWM_EC_ENABLECOMPOSITION = 1;
+
+const wchar_t kDwmapiLibraryName[] = L"dwmapi.dll";
+
+} // namespace
+
+ScreenCapturerWinGdi::ScreenCapturerWinGdi(const DesktopCaptureOptions& options)
+ : callback_(NULL),
+ mouse_shape_observer_(NULL),
+ current_screen_id_(kFullDesktopScreenId),
+ desktop_dc_(NULL),
+ memory_dc_(NULL),
+ dwmapi_library_(NULL),
+ composition_func_(NULL),
+ set_thread_execution_state_failed_(false) {
+ if (options.disable_effects()) {
+ // Load dwmapi.dll dynamically since it is not available on XP.
+ if (!dwmapi_library_)
+ dwmapi_library_ = LoadLibrary(kDwmapiLibraryName);
+
+ if (dwmapi_library_) {
+ composition_func_ = reinterpret_cast<DwmEnableCompositionFunc>(
+ GetProcAddress(dwmapi_library_, "DwmEnableComposition"));
+ }
+ }
+}
+
+ScreenCapturerWinGdi::~ScreenCapturerWinGdi() {
+ if (desktop_dc_)
+ ReleaseDC(NULL, desktop_dc_);
+ if (memory_dc_)
+ DeleteDC(memory_dc_);
+
+ // Restore Aero.
+ if (composition_func_)
+ (*composition_func_)(DWM_EC_ENABLECOMPOSITION);
+
+ if (dwmapi_library_)
+ FreeLibrary(dwmapi_library_);
+}
+
+void ScreenCapturerWinGdi::Capture(const DesktopRegion& region) {
+ TickTime capture_start_time = TickTime::Now();
+
+ queue_.MoveToNextFrame();
+
+ // Request that the system not power-down the system, or the display hardware.
+ if (!SetThreadExecutionState(ES_DISPLAY_REQUIRED | ES_SYSTEM_REQUIRED)) {
+ if (!set_thread_execution_state_failed_) {
+ set_thread_execution_state_failed_ = true;
+ LOG_F(LS_WARNING) << "Failed to make system & display power assertion: "
+ << GetLastError();
+ }
+ }
+
+ // Make sure the GDI capture resources are up-to-date.
+ PrepareCaptureResources();
+
+ if (!CaptureImage()) {
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }
+
+ const DesktopFrame* current_frame = queue_.current_frame();
+ const DesktopFrame* last_frame = queue_.previous_frame();
+ if (last_frame && last_frame->size().equals(current_frame->size())) {
+ // Make sure the differencer is set up correctly for these previous and
+ // current screens.
+ if (!differ_.get() ||
+ (differ_->width() != current_frame->size().width()) ||
+ (differ_->height() != current_frame->size().height()) ||
+ (differ_->bytes_per_row() != current_frame->stride())) {
+ differ_.reset(new Differ(current_frame->size().width(),
+ current_frame->size().height(),
+ DesktopFrame::kBytesPerPixel,
+ current_frame->stride()));
+ }
+
+ // Calculate difference between the two last captured frames.
+ DesktopRegion region;
+ differ_->CalcDirtyRegion(last_frame->data(), current_frame->data(),
+ &region);
+ helper_.InvalidateRegion(region);
+ } else {
+ // No previous frame is available, or the screen is resized. Invalidate the
+ // whole screen.
+ helper_.InvalidateScreen(current_frame->size());
+ }
+
+ helper_.set_size_most_recent(current_frame->size());
+
+ // Emit the current frame.
+ DesktopFrame* frame = queue_.current_frame()->Share();
+ frame->set_dpi(DesktopVector(
+ GetDeviceCaps(desktop_dc_, LOGPIXELSX),
+ GetDeviceCaps(desktop_dc_, LOGPIXELSY)));
+ frame->mutable_updated_region()->Clear();
+ helper_.TakeInvalidRegion(frame->mutable_updated_region());
+ frame->set_capture_time_ms(
+ (TickTime::Now() - capture_start_time).Milliseconds());
+ callback_->OnCaptureCompleted(frame);
+
+ // Check for cursor shape update.
+ CaptureCursor();
+}
+
+void ScreenCapturerWinGdi::SetMouseShapeObserver(
+ MouseShapeObserver* mouse_shape_observer) {
+ assert(!mouse_shape_observer_);
+ assert(mouse_shape_observer);
+
+ mouse_shape_observer_ = mouse_shape_observer;
+}
+
+bool ScreenCapturerWinGdi::GetScreenList(ScreenList* screens) {
+ return webrtc::GetScreenList(screens);
+}
+
+bool ScreenCapturerWinGdi::SelectScreen(ScreenId id) {
+ bool valid = IsScreenValid(id, &current_device_key_);
+ if (valid)
+ current_screen_id_ = id;
+ return valid;
+}
+
+void ScreenCapturerWinGdi::Start(Callback* callback) {
+ assert(!callback_);
+ assert(callback);
+
+ callback_ = callback;
+
+ // Vote to disable Aero composited desktop effects while capturing. Windows
+ // will restore Aero automatically if the process exits. This has no effect
+ // under Windows 8 or higher. See crbug.com/124018.
+ if (composition_func_)
+ (*composition_func_)(DWM_EC_DISABLECOMPOSITION);
+}
+
+void ScreenCapturerWinGdi::PrepareCaptureResources() {
+ // Switch to the desktop receiving user input if different from the current
+ // one.
+ scoped_ptr<Desktop> input_desktop(Desktop::GetInputDesktop());
+ if (input_desktop.get() != NULL && !desktop_.IsSame(*input_desktop)) {
+ // Release GDI resources otherwise SetThreadDesktop will fail.
+ if (desktop_dc_) {
+ ReleaseDC(NULL, desktop_dc_);
+ desktop_dc_ = NULL;
+ }
+
+ if (memory_dc_) {
+ DeleteDC(memory_dc_);
+ memory_dc_ = NULL;
+ }
+
+ // If SetThreadDesktop() fails, the thread is still assigned a desktop.
+    // So we can continue capturing screen bits, just from the wrong desktop.
+ desktop_.SetThreadDesktop(input_desktop.release());
+
+ // Re-assert our vote to disable Aero.
+ // See crbug.com/124018 and crbug.com/129906.
+ if (composition_func_ != NULL) {
+ (*composition_func_)(DWM_EC_DISABLECOMPOSITION);
+ }
+ }
+
+ // If the display bounds have changed then recreate GDI resources.
+ // TODO(wez): Also check for pixel format changes.
+ DesktopRect screen_rect(DesktopRect::MakeXYWH(
+ GetSystemMetrics(SM_XVIRTUALSCREEN),
+ GetSystemMetrics(SM_YVIRTUALSCREEN),
+ GetSystemMetrics(SM_CXVIRTUALSCREEN),
+ GetSystemMetrics(SM_CYVIRTUALSCREEN)));
+ if (!screen_rect.equals(desktop_dc_rect_)) {
+ if (desktop_dc_) {
+ ReleaseDC(NULL, desktop_dc_);
+ desktop_dc_ = NULL;
+ }
+ if (memory_dc_) {
+ DeleteDC(memory_dc_);
+ memory_dc_ = NULL;
+ }
+ desktop_dc_rect_ = DesktopRect();
+ }
+
+ if (desktop_dc_ == NULL) {
+ assert(memory_dc_ == NULL);
+
+ // Create GDI device contexts to capture from the desktop into memory.
+ desktop_dc_ = GetDC(NULL);
+ if (!desktop_dc_)
+ abort();
+ memory_dc_ = CreateCompatibleDC(desktop_dc_);
+ if (!memory_dc_)
+ abort();
+
+ desktop_dc_rect_ = screen_rect;
+
+ // Make sure the frame buffers will be reallocated.
+ queue_.Reset();
+
+ helper_.ClearInvalidRegion();
+ }
+}
+
+bool ScreenCapturerWinGdi::CaptureImage() {
+ DesktopRect screen_rect =
+ GetScreenRect(current_screen_id_, current_device_key_);
+ if (screen_rect.is_empty())
+ return false;
+
+ DesktopSize size = screen_rect.size();
+ // If the current buffer is from an older generation then allocate a new one.
+ // Note that we can't reallocate other buffers at this point, since the caller
+ // may still be reading from them.
+ if (!queue_.current_frame() ||
+ !queue_.current_frame()->size().equals(screen_rect.size())) {
+ assert(desktop_dc_ != NULL);
+ assert(memory_dc_ != NULL);
+
+ size_t buffer_size = size.width() * size.height() *
+ DesktopFrame::kBytesPerPixel;
+ SharedMemory* shared_memory = callback_->CreateSharedMemory(buffer_size);
+
+ scoped_ptr<DesktopFrame> buffer;
+ buffer.reset(
+ DesktopFrameWin::Create(size, shared_memory, desktop_dc_));
+ queue_.ReplaceCurrentFrame(buffer.release());
+ }
+
+ // Select the target bitmap into the memory dc and copy the rect from desktop
+ // to memory.
+ DesktopFrameWin* current = static_cast<DesktopFrameWin*>(
+ queue_.current_frame()->GetUnderlyingFrame());
+ HGDIOBJ previous_object = SelectObject(memory_dc_, current->bitmap());
+ if (previous_object != NULL) {
+ BitBlt(memory_dc_,
+ 0, 0, screen_rect.width(), screen_rect.height(),
+ desktop_dc_,
+ screen_rect.left(), screen_rect.top(),
+ SRCCOPY | CAPTUREBLT);
+
+    // Select back the previously selected object so that the device context
+ // could be destroyed independently of the bitmap if needed.
+ SelectObject(memory_dc_, previous_object);
+ }
+ return true;
+}
+
+void ScreenCapturerWinGdi::CaptureCursor() {
+ CURSORINFO cursor_info;
+ cursor_info.cbSize = sizeof(CURSORINFO);
+ if (!GetCursorInfo(&cursor_info)) {
+ LOG_F(LS_ERROR) << "Unable to get cursor info. Error = " << GetLastError();
+ return;
+ }
+
+ // Note that |cursor_info.hCursor| does not need to be freed.
+ scoped_ptr<MouseCursor> cursor_image(
+ CreateMouseCursorFromHCursor(desktop_dc_, cursor_info.hCursor));
+ if (!cursor_image.get())
+ return;
+
+ scoped_ptr<MouseCursorShape> cursor(new MouseCursorShape);
+ cursor->hotspot = cursor_image->hotspot();
+ cursor->size = cursor_image->image()->size();
+ uint8_t* current_row = cursor_image->image()->data();
+ for (int y = 0; y < cursor_image->image()->size().height(); ++y) {
+ cursor->data.append(current_row,
+ current_row + cursor_image->image()->size().width() *
+ DesktopFrame::kBytesPerPixel);
+ current_row += cursor_image->image()->stride();
+ }
+
+ // Compare the current cursor with the last one we sent to the client. If
+ // they're the same, then don't bother sending the cursor again.
+ if (last_cursor_.size.equals(cursor->size) &&
+ last_cursor_.hotspot.equals(cursor->hotspot) &&
+ last_cursor_.data == cursor->data) {
+ return;
+ }
+
+ LOG(LS_VERBOSE) << "Sending updated cursor: " << cursor->size.width() << "x"
+ << cursor->size.height();
+
+ // Record the last cursor image that we sent to the client.
+ last_cursor_ = *cursor;
+
+ if (mouse_shape_observer_)
+ mouse_shape_observer_->OnCursorShapeChanged(cursor.release());
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h
new file mode 100644
index 00000000000..2db87d097ca
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_GDI_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_GDI_H_
+
+#include "webrtc/modules/desktop_capture/screen_capturer.h"
+
+#include <windows.h>
+
+#include "webrtc/modules/desktop_capture/mouse_cursor_shape.h"
+#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
+#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/win/scoped_thread_desktop.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class Differ;
+class MouseShapeObserver;
+
+// ScreenCapturerWinGdi captures 32bit RGB using GDI.
+//
+// ScreenCapturerWinGdi is double-buffered as required by ScreenCapturer.
+class ScreenCapturerWinGdi : public ScreenCapturer {
+ public:
+ explicit ScreenCapturerWinGdi(const DesktopCaptureOptions& options);
+ virtual ~ScreenCapturerWinGdi();
+
+ // Overridden from ScreenCapturer:
+ virtual void Start(Callback* callback) OVERRIDE;
+ virtual void Capture(const DesktopRegion& region) OVERRIDE;
+ virtual void SetMouseShapeObserver(
+ MouseShapeObserver* mouse_shape_observer) OVERRIDE;
+ virtual bool GetScreenList(ScreenList* screens) OVERRIDE;
+ virtual bool SelectScreen(ScreenId id) OVERRIDE;
+
+ private:
+ typedef HRESULT (WINAPI * DwmEnableCompositionFunc)(UINT);
+
+ // Make sure that the device contexts match the screen configuration.
+ void PrepareCaptureResources();
+
+ // Captures the current screen contents into the current buffer. Returns true
+ // if succeeded.
+ bool CaptureImage();
+
+ // Capture the current cursor shape.
+ void CaptureCursor();
+
+ Callback* callback_;
+ MouseShapeObserver* mouse_shape_observer_;
+ ScreenId current_screen_id_;
+ std::wstring current_device_key_;
+
+ // A thread-safe list of invalid rectangles, and the size of the most
+ // recently captured screen.
+ ScreenCapturerHelper helper_;
+
+ // Snapshot of the last cursor bitmap we sent to the client. This is used
+ // to diff against the current cursor so we only send a cursor-change
+ // message when the shape has changed.
+ MouseCursorShape last_cursor_;
+
+ ScopedThreadDesktop desktop_;
+
+ // GDI resources used for screen capture.
+ HDC desktop_dc_;
+ HDC memory_dc_;
+
+  // Queue of the frame buffers.
+ ScreenCaptureFrameQueue queue_;
+
+ // Rectangle describing the bounds of the desktop device context, relative to
+ // the primary display's top-left.
+ DesktopRect desktop_dc_rect_;
+
+ // Class to calculate the difference between two screen bitmaps.
+ scoped_ptr<Differ> differ_;
+
+ HMODULE dwmapi_library_;
+ DwmEnableCompositionFunc composition_func_;
+
+ // Used to suppress duplicate logging of SetThreadExecutionState errors.
+ bool set_thread_execution_state_failed_;
+
+ DISALLOW_COPY_AND_ASSIGN(ScreenCapturerWinGdi);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_GDI_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
new file mode 100644
index 00000000000..042cb937acc
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.cc
@@ -0,0 +1,461 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h"
+
+#include <assert.h>
+
+#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
+#include "webrtc/modules/desktop_capture/desktop_frame.h"
+#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
+#include "webrtc/modules/desktop_capture/desktop_region.h"
+#include "webrtc/modules/desktop_capture/differ.h"
+#include "webrtc/modules/desktop_capture/mouse_cursor.h"
+#include "webrtc/modules/desktop_capture/win/cursor.h"
+#include "webrtc/modules/desktop_capture/win/desktop.h"
+#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+#include "webrtc/system_wrappers/interface/tick_util.h"
+
+namespace webrtc {
+
+// kMagnifierWindowClass has to be "Magnifier" according to the Magnification
+// API. The other strings can be anything.
+static LPCTSTR kMagnifierHostClass = L"ScreenCapturerWinMagnifierHost";
+static LPCTSTR kHostWindowName = L"MagnifierHost";
+static LPCTSTR kMagnifierWindowClass = L"Magnifier";
+static LPCTSTR kMagnifierWindowName = L"MagnifierWindow";
+
+Atomic32 ScreenCapturerWinMagnifier::tls_index_(TLS_OUT_OF_INDEXES);
+
+ScreenCapturerWinMagnifier::ScreenCapturerWinMagnifier(
+ scoped_ptr<ScreenCapturer> fallback_capturer)
+ : fallback_capturer_(fallback_capturer.Pass()),
+ fallback_capturer_started_(false),
+ callback_(NULL),
+ current_screen_id_(kFullDesktopScreenId),
+ excluded_window_(NULL),
+ set_thread_execution_state_failed_(false),
+ desktop_dc_(NULL),
+ mag_lib_handle_(NULL),
+ mag_initialize_func_(NULL),
+ mag_uninitialize_func_(NULL),
+ set_window_source_func_(NULL),
+ set_window_filter_list_func_(NULL),
+ set_image_scaling_callback_func_(NULL),
+ host_window_(NULL),
+ magnifier_window_(NULL),
+ magnifier_initialized_(false),
+ magnifier_capture_succeeded_(true) {
+}
+
+ScreenCapturerWinMagnifier::~ScreenCapturerWinMagnifier() {
+ // DestroyWindow must be called before MagUninitialize. magnifier_window_ is
+ // destroyed automatically when host_window_ is destroyed.
+ if (host_window_)
+ DestroyWindow(host_window_);
+
+ if (magnifier_initialized_)
+ mag_uninitialize_func_();
+
+ if (mag_lib_handle_)
+ FreeLibrary(mag_lib_handle_);
+
+ if (desktop_dc_)
+ ReleaseDC(NULL, desktop_dc_);
+}
+
+void ScreenCapturerWinMagnifier::Start(Callback* callback) {
+ assert(!callback_);
+ assert(callback);
+ callback_ = callback;
+
+ InitializeMagnifier();
+}
+
+void ScreenCapturerWinMagnifier::Capture(const DesktopRegion& region) {
+ TickTime capture_start_time = TickTime::Now();
+
+ queue_.MoveToNextFrame();
+
+ // Request that the system not power-down the system, or the display hardware.
+ if (!SetThreadExecutionState(ES_DISPLAY_REQUIRED | ES_SYSTEM_REQUIRED)) {
+ if (!set_thread_execution_state_failed_) {
+ set_thread_execution_state_failed_ = true;
+ LOG_F(LS_WARNING) << "Failed to make system & display power assertion: "
+ << GetLastError();
+ }
+ }
+ // Switch to the desktop receiving user input if different from the current
+ // one.
+ scoped_ptr<Desktop> input_desktop(Desktop::GetInputDesktop());
+ if (input_desktop.get() != NULL && !desktop_.IsSame(*input_desktop)) {
+ // Release GDI resources otherwise SetThreadDesktop will fail.
+ if (desktop_dc_) {
+ ReleaseDC(NULL, desktop_dc_);
+ desktop_dc_ = NULL;
+ }
+ // If SetThreadDesktop() fails, the thread is still assigned a desktop.
+    // So we can continue capturing screen bits, just from the wrong desktop.
+ desktop_.SetThreadDesktop(input_desktop.release());
+ }
+
+ bool succeeded = false;
+
+  // Do not try to use the magnifier if it's capturing a non-primary screen, or
+  // it failed before.
+ if (magnifier_initialized_ && IsCapturingPrimaryScreenOnly() &&
+ magnifier_capture_succeeded_) {
+ DesktopRect rect = GetScreenRect(current_screen_id_, current_device_key_);
+ CreateCurrentFrameIfNecessary(rect.size());
+
+ // CaptureImage may fail in some situations, e.g. windows8 metro mode.
+ succeeded = CaptureImage(rect);
+ }
+
+ // Defer to the fallback capturer if magnifier capturer did not work.
+ if (!succeeded) {
+ LOG_F(LS_WARNING) << "Switching to the fallback screen capturer.";
+ StartFallbackCapturer();
+ fallback_capturer_->Capture(region);
+ return;
+ }
+
+ const DesktopFrame* current_frame = queue_.current_frame();
+ const DesktopFrame* last_frame = queue_.previous_frame();
+ if (last_frame && last_frame->size().equals(current_frame->size())) {
+ // Make sure the differencer is set up correctly for these previous and
+ // current screens.
+ if (!differ_.get() || (differ_->width() != current_frame->size().width()) ||
+ (differ_->height() != current_frame->size().height()) ||
+ (differ_->bytes_per_row() != current_frame->stride())) {
+ differ_.reset(new Differ(current_frame->size().width(),
+ current_frame->size().height(),
+ DesktopFrame::kBytesPerPixel,
+ current_frame->stride()));
+ }
+
+ // Calculate difference between the two last captured frames.
+ DesktopRegion region;
+ differ_->CalcDirtyRegion(
+ last_frame->data(), current_frame->data(), &region);
+ helper_.InvalidateRegion(region);
+ } else {
+ // No previous frame is available, or the screen is resized. Invalidate the
+ // whole screen.
+ helper_.InvalidateScreen(current_frame->size());
+ }
+
+ helper_.set_size_most_recent(current_frame->size());
+
+ // Emit the current frame.
+ DesktopFrame* frame = queue_.current_frame()->Share();
+ frame->set_dpi(DesktopVector(GetDeviceCaps(desktop_dc_, LOGPIXELSX),
+ GetDeviceCaps(desktop_dc_, LOGPIXELSY)));
+ frame->mutable_updated_region()->Clear();
+ helper_.TakeInvalidRegion(frame->mutable_updated_region());
+ frame->set_capture_time_ms(
+ (TickTime::Now() - capture_start_time).Milliseconds());
+ callback_->OnCaptureCompleted(frame);
+}
+
+void ScreenCapturerWinMagnifier::SetMouseShapeObserver(
+ MouseShapeObserver* mouse_shape_observer) {
+ assert(false); // NOTREACHED();
+}
+
+bool ScreenCapturerWinMagnifier::GetScreenList(ScreenList* screens) {
+ return webrtc::GetScreenList(screens);
+}
+
+bool ScreenCapturerWinMagnifier::SelectScreen(ScreenId id) {
+ bool valid = IsScreenValid(id, &current_device_key_);
+
+ // Set current_screen_id_ even if the fallback capturer is being used, so we
+ // can switch back to the magnifier when possible.
+ if (valid)
+ current_screen_id_ = id;
+
+ if (fallback_capturer_started_)
+ fallback_capturer_->SelectScreen(id);
+
+ return valid;
+}
+
+void ScreenCapturerWinMagnifier::SetExcludedWindow(WindowId excluded_window) {
+ excluded_window_ = (HWND)excluded_window;
+ if (excluded_window_ && magnifier_initialized_) {
+ set_window_filter_list_func_(
+ magnifier_window_, MW_FILTERMODE_EXCLUDE, 1, &excluded_window_);
+ }
+}
+
+bool ScreenCapturerWinMagnifier::CaptureImage(const DesktopRect& rect) {
+ assert(magnifier_initialized_);
+
+ // Set the magnifier control to cover the captured rect. The content of the
+ // magnifier control will be the captured image.
+ BOOL result = SetWindowPos(magnifier_window_,
+ NULL,
+ rect.left(), rect.top(),
+ rect.width(), rect.height(),
+ 0);
+ if (!result) {
+ LOG_F(LS_WARNING) << "Failed to call SetWindowPos: " << GetLastError()
+ << ". Rect = {" << rect.left() << ", " << rect.top()
+ << ", " << rect.right() << ", " << rect.bottom() << "}";
+ return false;
+ }
+
+ magnifier_capture_succeeded_ = false;
+
+ RECT native_rect = {rect.left(), rect.top(), rect.right(), rect.bottom()};
+
+ // OnCaptured will be called via OnMagImageScalingCallback and fill in the
+ // frame before set_window_source_func_ returns.
+ result = set_window_source_func_(magnifier_window_, native_rect);
+
+ if (!result) {
+ LOG_F(LS_WARNING) << "Failed to call MagSetWindowSource: " << GetLastError()
+ << ". Rect = {" << rect.left() << ", " << rect.top()
+ << ", " << rect.right() << ", " << rect.bottom() << "}";
+ return false;
+ }
+
+ return magnifier_capture_succeeded_;
+}
+
+BOOL ScreenCapturerWinMagnifier::OnMagImageScalingCallback(
+ HWND hwnd,
+ void* srcdata,
+ MAGIMAGEHEADER srcheader,
+ void* destdata,
+ MAGIMAGEHEADER destheader,
+ RECT unclipped,
+ RECT clipped,
+ HRGN dirty) {
+ assert(tls_index_.Value() != TLS_OUT_OF_INDEXES);
+
+ ScreenCapturerWinMagnifier* owner =
+ reinterpret_cast<ScreenCapturerWinMagnifier*>(
+ TlsGetValue(tls_index_.Value()));
+
+ owner->OnCaptured(srcdata, srcheader);
+
+ return TRUE;
+}
+
+bool ScreenCapturerWinMagnifier::InitializeMagnifier() {
+ assert(!magnifier_initialized_);
+
+ desktop_dc_ = GetDC(NULL);
+
+ mag_lib_handle_ = LoadLibrary(L"Magnification.dll");
+ if (!mag_lib_handle_)
+ return false;
+
+ // Initialize Magnification API function pointers.
+ mag_initialize_func_ = reinterpret_cast<MagInitializeFunc>(
+ GetProcAddress(mag_lib_handle_, "MagInitialize"));
+ mag_uninitialize_func_ = reinterpret_cast<MagUninitializeFunc>(
+ GetProcAddress(mag_lib_handle_, "MagUninitialize"));
+ set_window_source_func_ = reinterpret_cast<MagSetWindowSourceFunc>(
+ GetProcAddress(mag_lib_handle_, "MagSetWindowSource"));
+ set_window_filter_list_func_ = reinterpret_cast<MagSetWindowFilterListFunc>(
+ GetProcAddress(mag_lib_handle_, "MagSetWindowFilterList"));
+ set_image_scaling_callback_func_ =
+ reinterpret_cast<MagSetImageScalingCallbackFunc>(
+ GetProcAddress(mag_lib_handle_, "MagSetImageScalingCallback"));
+
+ if (!mag_initialize_func_ || !mag_uninitialize_func_ ||
+ !set_window_source_func_ || !set_window_filter_list_func_ ||
+ !set_image_scaling_callback_func_) {
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "library functions missing.";
+ return false;
+ }
+
+ BOOL result = mag_initialize_func_();
+ if (!result) {
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from MagInitialize " << GetLastError();
+ return false;
+ }
+
+ HMODULE hInstance = NULL;
+ result = GetModuleHandleExA(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS |
+ GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT,
+ reinterpret_cast<char*>(&DefWindowProc),
+ &hInstance);
+ if (!result) {
+ mag_uninitialize_func_();
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from GetModulehandleExA " << GetLastError();
+ return false;
+ }
+
+ // Register the host window class. See the MSDN documentation of the
+  // Magnification API for more information.
+ WNDCLASSEX wcex = {};
+ wcex.cbSize = sizeof(WNDCLASSEX);
+ wcex.lpfnWndProc = &DefWindowProc;
+ wcex.hInstance = hInstance;
+ wcex.hCursor = LoadCursor(NULL, IDC_ARROW);
+ wcex.lpszClassName = kMagnifierHostClass;
+
+ // Ignore the error which may happen when the class is already registered.
+ RegisterClassEx(&wcex);
+
+ // Create the host window.
+ host_window_ = CreateWindowEx(WS_EX_LAYERED,
+ kMagnifierHostClass,
+ kHostWindowName,
+ 0,
+ 0, 0, 0, 0,
+ NULL,
+ NULL,
+ hInstance,
+ NULL);
+ if (!host_window_) {
+ mag_uninitialize_func_();
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from creating host window " << GetLastError();
+ return false;
+ }
+
+ // Create the magnifier control.
+ magnifier_window_ = CreateWindow(kMagnifierWindowClass,
+ kMagnifierWindowName,
+ WS_CHILD | WS_VISIBLE,
+ 0, 0, 0, 0,
+ host_window_,
+ NULL,
+ hInstance,
+ NULL);
+ if (!magnifier_window_) {
+ mag_uninitialize_func_();
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from creating magnifier window "
+ << GetLastError();
+ return false;
+ }
+
+ // Hide the host window.
+ ShowWindow(host_window_, SW_HIDE);
+
+ // Set the scaling callback to receive captured image.
+ result = set_image_scaling_callback_func_(
+ magnifier_window_,
+ &ScreenCapturerWinMagnifier::OnMagImageScalingCallback);
+ if (!result) {
+ mag_uninitialize_func_();
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from MagSetImageScalingCallback "
+ << GetLastError();
+ return false;
+ }
+
+ if (excluded_window_) {
+ result = set_window_filter_list_func_(
+ magnifier_window_, MW_FILTERMODE_EXCLUDE, 1, &excluded_window_);
+ if (!result) {
+ mag_uninitialize_func_();
+ LOG_F(LS_WARNING) << "Failed to initialize ScreenCapturerWinMagnifier: "
+ << "error from MagSetWindowFilterList "
+ << GetLastError();
+ return false;
+ }
+ }
+
+ if (tls_index_.Value() == TLS_OUT_OF_INDEXES) {
+    // More than one thread may get here at the same time, but only one will
+ // write to tls_index_ using CompareExchange.
+ DWORD new_tls_index = TlsAlloc();
+ if (!tls_index_.CompareExchange(new_tls_index, TLS_OUT_OF_INDEXES))
+ TlsFree(new_tls_index);
+ }
+
+ assert(tls_index_.Value() != TLS_OUT_OF_INDEXES);
+ TlsSetValue(tls_index_.Value(), this);
+
+ magnifier_initialized_ = true;
+ return true;
+}
+
+void ScreenCapturerWinMagnifier::OnCaptured(void* data,
+ const MAGIMAGEHEADER& header) {
+ DesktopFrame* current_frame = queue_.current_frame();
+
+ // Verify the format.
+ // TODO(jiayl): support capturing sources with pixel formats other than RGBA.
+ int captured_bytes_per_pixel = header.cbSize / header.width / header.height;
+ if (header.format != GUID_WICPixelFormat32bppRGBA ||
+ header.width != static_cast<UINT>(current_frame->size().width()) ||
+ header.height != static_cast<UINT>(current_frame->size().height()) ||
+ header.stride != static_cast<UINT>(current_frame->stride()) ||
+ captured_bytes_per_pixel != DesktopFrame::kBytesPerPixel) {
+ LOG_F(LS_WARNING) << "Output format does not match the captured format: "
+ << "width = " << header.width << ", "
+ << "height = " << header.height << ", "
+ << "stride = " << header.stride << ", "
+ << "bpp = " << captured_bytes_per_pixel << ", "
+ << "pixel format RGBA ? "
+ << (header.format == GUID_WICPixelFormat32bppRGBA) << ".";
+ return;
+ }
+
+ // Copy the data into the frame.
+ current_frame->CopyPixelsFrom(
+ reinterpret_cast<uint8_t*>(data),
+ header.stride,
+ DesktopRect::MakeXYWH(0, 0, header.width, header.height));
+
+ magnifier_capture_succeeded_ = true;
+}
+
+void ScreenCapturerWinMagnifier::CreateCurrentFrameIfNecessary(
+ const DesktopSize& size) {
+ // If the current buffer is from an older generation then allocate a new one.
+ // Note that we can't reallocate other buffers at this point, since the caller
+ // may still be reading from them.
+ if (!queue_.current_frame() || !queue_.current_frame()->size().equals(size)) {
+ size_t buffer_size =
+ size.width() * size.height() * DesktopFrame::kBytesPerPixel;
+ SharedMemory* shared_memory = callback_->CreateSharedMemory(buffer_size);
+
+ scoped_ptr<DesktopFrame> buffer;
+ if (shared_memory) {
+ buffer.reset(new SharedMemoryDesktopFrame(
+ size, size.width() * DesktopFrame::kBytesPerPixel, shared_memory));
+ } else {
+ buffer.reset(new BasicDesktopFrame(size));
+ }
+ queue_.ReplaceCurrentFrame(buffer.release());
+ }
+}
+
+bool ScreenCapturerWinMagnifier::IsCapturingPrimaryScreenOnly() const {
+ if (current_screen_id_ != kFullDesktopScreenId)
+ return current_screen_id_ == 0; // the primary screen is always '0'.
+
+ return GetSystemMetrics(SM_CMONITORS) == 1;
+}
+
+void ScreenCapturerWinMagnifier::StartFallbackCapturer() {
+ assert(fallback_capturer_);
+ if (!fallback_capturer_started_) {
+ fallback_capturer_started_ = true;
+
+ fallback_capturer_->Start(callback_);
+ fallback_capturer_->SelectScreen(current_screen_id_);
+ }
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h
new file mode 100644
index 00000000000..b6d559083eb
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/screen_capturer_win_magnifier.h
@@ -0,0 +1,159 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_MAGNIFIER_H_
+#define WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_MAGNIFIER_H_
+
+#include <windows.h>
+#include <magnification.h>
+#include <wincodec.h>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/modules/desktop_capture/screen_capture_frame_queue.h"
+#include "webrtc/modules/desktop_capture/screen_capturer.h"
+#include "webrtc/modules/desktop_capture/screen_capturer_helper.h"
+#include "webrtc/modules/desktop_capture/win/scoped_thread_desktop.h"
+#include "webrtc/system_wrappers/interface/atomic32.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class DesktopFrame;
+class DesktopRect;
+class Differ;
+class MouseShapeObserver;
+
+// Captures the screen using the Magnification API to support window exclusion.
+// Each capturer must run on a dedicated thread because it uses thread local
+// storage for redirecting the library callback. Also the thread must have a UI
+// message loop to handle the window messages for the magnifier window.
+class ScreenCapturerWinMagnifier : public ScreenCapturer {
+ public:
+ // |fallback_capturer| will be used to capture the screen if a non-primary
+ // screen is being captured, or the OS does not support Magnification API, or
+ // the magnifier capturer fails (e.g. in Windows8 Metro mode).
+ explicit ScreenCapturerWinMagnifier(
+ scoped_ptr<ScreenCapturer> fallback_capturer);
+ virtual ~ScreenCapturerWinMagnifier();
+
+ // Overridden from ScreenCapturer:
+ virtual void Start(Callback* callback) OVERRIDE;
+ virtual void Capture(const DesktopRegion& region) OVERRIDE;
+ virtual void SetMouseShapeObserver(
+ MouseShapeObserver* mouse_shape_observer) OVERRIDE;
+ virtual bool GetScreenList(ScreenList* screens) OVERRIDE;
+ virtual bool SelectScreen(ScreenId id) OVERRIDE;
+ virtual void SetExcludedWindow(WindowId window) OVERRIDE;
+
+ private:
+ typedef BOOL(WINAPI* MagImageScalingCallback)(HWND hwnd,
+ void* srcdata,
+ MAGIMAGEHEADER srcheader,
+ void* destdata,
+ MAGIMAGEHEADER destheader,
+ RECT unclipped,
+ RECT clipped,
+ HRGN dirty);
+ typedef BOOL(WINAPI* MagInitializeFunc)(void);
+ typedef BOOL(WINAPI* MagUninitializeFunc)(void);
+ typedef BOOL(WINAPI* MagSetWindowSourceFunc)(HWND hwnd, RECT rect);
+ typedef BOOL(WINAPI* MagSetWindowFilterListFunc)(HWND hwnd,
+ DWORD dwFilterMode,
+ int count,
+ HWND* pHWND);
+ typedef BOOL(WINAPI* MagSetImageScalingCallbackFunc)(
+ HWND hwnd,
+ MagImageScalingCallback callback);
+
+ static BOOL WINAPI OnMagImageScalingCallback(HWND hwnd,
+ void* srcdata,
+ MAGIMAGEHEADER srcheader,
+ void* destdata,
+ MAGIMAGEHEADER destheader,
+ RECT unclipped,
+ RECT clipped,
+ HRGN dirty);
+
+ // Captures the screen within |rect| in the desktop coordinates. Returns true
+ // if succeeded.
+ // It can only capture the primary screen for now. The magnification library
+ // crashes under some screen configurations (e.g. secondary screen on top of
+ // primary screen) if it tries to capture a non-primary screen. The caller
+ // must make sure not calling it on non-primary screens.
+ bool CaptureImage(const DesktopRect& rect);
+
+ // Helper method for setting up the magnifier control. Returns true if
+ // succeeded.
+ bool InitializeMagnifier();
+
+ // Called by OnMagImageScalingCallback to output captured data.
+ void OnCaptured(void* data, const MAGIMAGEHEADER& header);
+
+ // Makes sure the current frame exists and matches |size|.
+ void CreateCurrentFrameIfNecessary(const DesktopSize& size);
+
+ // Returns true if we are capturing the primary screen only.
+ bool IsCapturingPrimaryScreenOnly() const;
+
+ // Start the fallback capturer and select the screen.
+ void StartFallbackCapturer();
+
+ static Atomic32 tls_index_;
+
+ scoped_ptr<ScreenCapturer> fallback_capturer_;
+ bool fallback_capturer_started_;
+ Callback* callback_;
+ ScreenId current_screen_id_;
+ std::wstring current_device_key_;
+ HWND excluded_window_;
+
+ // A thread-safe list of invalid rectangles, and the size of the most
+ // recently captured screen.
+ ScreenCapturerHelper helper_;
+
+  // Queue of the frame buffers.
+ ScreenCaptureFrameQueue queue_;
+
+ // Class to calculate the difference between two screen bitmaps.
+ scoped_ptr<Differ> differ_;
+
+ // Used to suppress duplicate logging of SetThreadExecutionState errors.
+ bool set_thread_execution_state_failed_;
+
+ ScopedThreadDesktop desktop_;
+
+ // Used for getting the screen dpi.
+ HDC desktop_dc_;
+
+ HMODULE mag_lib_handle_;
+ MagInitializeFunc mag_initialize_func_;
+ MagUninitializeFunc mag_uninitialize_func_;
+ MagSetWindowSourceFunc set_window_source_func_;
+ MagSetWindowFilterListFunc set_window_filter_list_func_;
+ MagSetImageScalingCallbackFunc set_image_scaling_callback_func_;
+
+ // The hidden window hosting the magnifier control.
+ HWND host_window_;
+ // The magnifier control that captures the screen.
+ HWND magnifier_window_;
+
+ // True if the magnifier control has been successfully initialized.
+ bool magnifier_initialized_;
+
+ // True if the last OnMagImageScalingCallback was called and handled
+ // successfully. Reset at the beginning of each CaptureImage call.
+ bool magnifier_capture_succeeded_;
+
+ DISALLOW_COPY_AND_ASSIGN(ScreenCapturerWinMagnifier);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_SCREEN_CAPTURER_WIN_MAGNIFIER_H_
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.cc b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.cc
new file mode 100644
index 00000000000..03e021954b1
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.cc
@@ -0,0 +1,46 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/desktop_capture/win/window_capture_utils.h"
+
+namespace webrtc {
+
+bool
+GetCroppedWindowRect(HWND window,
+ DesktopRect* cropped_rect,
+ DesktopRect* original_rect) {
+ RECT rect;
+ if (!GetWindowRect(window, &rect)) {
+ return false;
+ }
+ WINDOWPLACEMENT window_placement;
+ window_placement.length = sizeof(window_placement);
+ if (!GetWindowPlacement(window, &window_placement)) {
+ return false;
+ }
+
+ *original_rect = DesktopRect::MakeLTRB(
+ rect.left, rect.top, rect.right, rect.bottom);
+
+ if (window_placement.showCmd & SW_SHOWMAXIMIZED) {
+ DesktopSize border = DesktopSize(GetSystemMetrics(SM_CXSIZEFRAME),
+ GetSystemMetrics(SM_CYSIZEFRAME));
+ *cropped_rect = DesktopRect::MakeLTRB(
+ rect.left + border.width(),
+ rect.top,
+ rect.right - border.width(),
+ rect.bottom - border.height());
+ } else {
+ *cropped_rect = *original_rect;
+ }
+ return true;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h
new file mode 100644
index 00000000000..2a3a470c59e
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/desktop_capture/win/window_capture_utils.h
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <windows.h>
+
+#include "webrtc/modules/desktop_capture/desktop_geometry.h"
+
+namespace webrtc {
+
+// Output the window rect, with the left/right/bottom frame border cropped if
+// the window is maximized. |cropped_rect| is the cropped rect relative to the
+// desktop. |original_rect| is the original rect returned from GetWindowRect.
+// Returns true if all API calls succeeded.
+bool GetCroppedWindowRect(HWND window,
+ DesktopRect* cropped_rect,
+ DesktopRect* original_rect);
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
index 478c8ee99c3..ad75c88d5d8 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer.h
@@ -11,12 +11,12 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WINDOW_CAPTURER_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_WINDOW_CAPTURER_H_
-#include <vector>
#include <string>
+#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
#include "webrtc/modules/desktop_capture/desktop_capturer.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -49,6 +49,14 @@ class WindowCapturer : public DesktopCapturer {
// Select window to be captured. Returns false in case of a failure (e.g. if
// there is no window with the specified id).
virtual bool SelectWindow(WindowId id) = 0;
+
+ // Bring the selected window to the front. Returns false in case of a
+ // failure or no window selected.
+ // TODO(jiayl): remove the default impl when FakeWindowCapturer is updated in
+ // Chromium.
+ virtual bool BringSelectedWindowToFront() {
+ return true;
+ }
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm
index 6268fc01156..d177fc40c7d 100755..100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_mac.mm
@@ -12,6 +12,7 @@
#include <assert.h>
#include <ApplicationServices/ApplicationServices.h>
+#include <Cocoa/Cocoa.h>
#include <CoreFoundation/CoreFoundation.h>
#include "webrtc/modules/desktop_capture/desktop_frame.h"
@@ -41,6 +42,18 @@ bool CFStringRefToUtf8(const CFStringRef string, std::string* str_utf8) {
return true;
}
+bool IsWindowValid(CGWindowID id) {
+ CFArrayRef window_id_array =
+ CFArrayCreate(NULL, reinterpret_cast<const void **>(&id), 1, NULL);
+ CFArrayRef window_array =
+ CGWindowListCreateDescriptionFromArray(window_id_array);
+ bool valid = window_array && CFArrayGetCount(window_array);
+ CFRelease(window_id_array);
+ CFRelease(window_array);
+
+ return valid;
+}
+
class WindowCapturerMac : public WindowCapturer {
public:
WindowCapturerMac();
@@ -49,6 +62,7 @@ class WindowCapturerMac : public WindowCapturer {
// WindowCapturer interface.
virtual bool GetWindowList(WindowList* windows) OVERRIDE;
virtual bool SelectWindow(WindowId id) OVERRIDE;
+ virtual bool BringSelectedWindowToFront() OVERRIDE;
// DesktopCapturer interface.
virtual void Start(Callback* callback) OVERRIDE;
@@ -113,24 +127,47 @@ bool WindowCapturerMac::GetWindowList(WindowList* windows) {
}
bool WindowCapturerMac::SelectWindow(WindowId id) {
- // Request description for the specified window to make sure |id| is valid.
+ if (!IsWindowValid(id))
+ return false;
+ window_id_ = id;
+ return true;
+}
+
+bool WindowCapturerMac::BringSelectedWindowToFront() {
+ if (!window_id_)
+ return false;
+
CGWindowID ids[1];
- ids[0] = id;
+ ids[0] = window_id_;
CFArrayRef window_id_array =
CFArrayCreate(NULL, reinterpret_cast<const void **>(&ids), 1, NULL);
+
CFArrayRef window_array =
CGWindowListCreateDescriptionFromArray(window_id_array);
- int results_count = window_array ? CFArrayGetCount(window_array) : 0;
- CFRelease(window_id_array);
- CFRelease(window_array);
-
- if (results_count == 0) {
+ if (window_array == NULL || 0 == CFArrayGetCount(window_array)) {
// Could not find the window. It might have been closed.
+ LOG(LS_INFO) << "Window not found";
+ CFRelease(window_id_array);
return false;
}
- window_id_ = id;
- return true;
+ CFDictionaryRef window = reinterpret_cast<CFDictionaryRef>(
+ CFArrayGetValueAtIndex(window_array, 0));
+ CFNumberRef pid_ref = reinterpret_cast<CFNumberRef>(
+ CFDictionaryGetValue(window, kCGWindowOwnerPID));
+
+ int pid;
+ CFNumberGetValue(pid_ref, kCFNumberIntType, &pid);
+
+ // TODO(jiayl): this will bring the process main window to the front. We
+ // should find a way to bring only the window to the front.
+ bool result =
+ [[NSRunningApplication runningApplicationWithProcessIdentifier: pid]
+ activateWithOptions: NSApplicationActivateIgnoringOtherApps];
+
+ CFRelease(window_id_array);
+ CFRelease(window_array);
+ return result;
}
void WindowCapturerMac::Start(Callback* callback) {
@@ -141,12 +178,16 @@ void WindowCapturerMac::Start(Callback* callback) {
}
void WindowCapturerMac::Capture(const DesktopRegion& region) {
+ if (!IsWindowValid(window_id_)) {
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }
+
CGImageRef window_image = CGWindowListCreateImage(
CGRectNull, kCGWindowListOptionIncludingWindow,
window_id_, kCGWindowImageBoundsIgnoreFraming);
if (!window_image) {
- CFRelease(window_image);
callback_->OnCaptureCompleted(NULL);
return;
}
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
index 7bb1247ea9d..5f9010d2fd3 100755
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_null.cc
@@ -26,6 +26,7 @@ class WindowCapturerNull : public WindowCapturer {
// WindowCapturer interface.
virtual bool GetWindowList(WindowList* windows) OVERRIDE;
virtual bool SelectWindow(WindowId id) OVERRIDE;
+ virtual bool BringSelectedWindowToFront() OVERRIDE;
// DesktopCapturer interface.
virtual void Start(Callback* callback) OVERRIDE;
@@ -54,6 +55,11 @@ bool WindowCapturerNull::SelectWindow(WindowId id) {
return false;
}
+bool WindowCapturerNull::BringSelectedWindowToFront() {
+ // Not implemented yet.
+ return false;
+}
+
void WindowCapturerNull::Start(Callback* callback) {
assert(!callback_);
assert(callback);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
index 95f41db73b2..a0021856203 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_win.cc
@@ -11,9 +11,9 @@
#include "webrtc/modules/desktop_capture/window_capturer.h"
#include <assert.h>
-#include <windows.h>
#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
+#include "webrtc/modules/desktop_capture/win/window_capture_utils.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@@ -89,6 +89,7 @@ class WindowCapturerWin : public WindowCapturer {
// WindowCapturer interface.
virtual bool GetWindowList(WindowList* windows) OVERRIDE;
virtual bool SelectWindow(WindowId id) OVERRIDE;
+ virtual bool BringSelectedWindowToFront() OVERRIDE;
// DesktopCapturer interface.
virtual void Start(Callback* callback) OVERRIDE;
@@ -157,6 +158,16 @@ bool WindowCapturerWin::SelectWindow(WindowId id) {
return true;
}
+bool WindowCapturerWin::BringSelectedWindowToFront() {
+ if (!window_)
+ return false;
+
+ if (!IsWindow(window_) || !IsWindowVisible(window_) || IsIconic(window_))
+ return false;
+
+ return SetForegroundWindow(window_) != 0;
+}
+
void WindowCapturerWin::Start(Callback* callback) {
assert(!callback_);
assert(callback);
@@ -171,15 +182,16 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) {
return;
}
- // Stop capturing if the window has been minimized or hidden.
- if (IsIconic(window_) || !IsWindowVisible(window_)) {
+ // Stop capturing if the window has been closed or hidden.
+ if (!IsWindow(window_) || !IsWindowVisible(window_)) {
callback_->OnCaptureCompleted(NULL);
return;
}
- RECT rect;
- if (!GetWindowRect(window_, &rect)) {
- LOG(LS_WARNING) << "Failed to get window size: " << GetLastError();
+ DesktopRect original_rect;
+ DesktopRect cropped_rect;
+ if (!GetCroppedWindowRect(window_, &cropped_rect, &original_rect)) {
+ LOG(LS_WARNING) << "Failed to get window info: " << GetLastError();
callback_->OnCaptureCompleted(NULL);
return;
}
@@ -192,8 +204,7 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) {
}
scoped_ptr<DesktopFrameWin> frame(DesktopFrameWin::Create(
- DesktopSize(rect.right - rect.left, rect.bottom - rect.top),
- NULL, window_dc));
+ cropped_rect.size(), NULL, window_dc));
if (!frame.get()) {
ReleaseDC(window_, window_dc);
callback_->OnCaptureCompleted(NULL);
@@ -201,7 +212,7 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) {
}
HDC mem_dc = CreateCompatibleDC(window_dc);
- SelectObject(mem_dc, frame->bitmap());
+ HGDIOBJ previous_object = SelectObject(mem_dc, frame->bitmap());
BOOL result = FALSE;
// When desktop composition (Aero) is enabled each window is rendered to a
@@ -217,21 +228,24 @@ void WindowCapturerWin::Capture(const DesktopRegion& region) {
// When composition is enabled the DC returned by GetWindowDC() doesn't always
// have window frame rendered correctly. Windows renders it only once and then
// caches the result between captures. We hack it around by calling
- // PrintWindow() whenever window size changes - it somehow affects what we
- // get from BitBlt() on the subsequent captures.
+ // PrintWindow() whenever window size changes, including the first time of
+ // capturing - it somehow affects what we get from BitBlt() on the subsequent
+ // captures.
- if (!IsAeroEnabled() ||
- (!previous_size_.is_empty() && !previous_size_.equals(frame->size()))) {
+ if (!IsAeroEnabled() || !previous_size_.equals(frame->size())) {
result = PrintWindow(window_, mem_dc, 0);
}
// Aero is enabled or PrintWindow() failed, use BitBlt.
if (!result) {
result = BitBlt(mem_dc, 0, 0, frame->size().width(), frame->size().height(),
- window_dc, 0, 0, SRCCOPY);
+ window_dc,
+ cropped_rect.left() - original_rect.left(),
+ cropped_rect.top() - original_rect.top(),
+ SRCCOPY);
}
- SelectObject(mem_dc, NULL);
+ SelectObject(mem_dc, previous_object);
DeleteDC(mem_dc);
ReleaseDC(window_, window_dc);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
index 5a14356fd16..b641c932180 100755
--- a/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/window_capturer_x11.cc
@@ -10,13 +10,14 @@
#include "webrtc/modules/desktop_capture/window_capturer.h"
+#include <assert.h>
#include <string.h>
#include <X11/Xatom.h>
#include <X11/extensions/Xcomposite.h>
#include <X11/extensions/Xrender.h>
#include <X11/Xutil.h>
+
#include <algorithm>
-#include <cassert>
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
@@ -84,7 +85,8 @@ class XWindowProperty {
DISALLOW_COPY_AND_ASSIGN(XWindowProperty);
};
-class WindowCapturerLinux : public WindowCapturer {
+class WindowCapturerLinux : public WindowCapturer,
+ public SharedXDisplay::XEventHandler {
public:
WindowCapturerLinux(const DesktopCaptureOptions& options);
virtual ~WindowCapturerLinux();
@@ -92,11 +94,15 @@ class WindowCapturerLinux : public WindowCapturer {
// WindowCapturer interface.
virtual bool GetWindowList(WindowList* windows) OVERRIDE;
virtual bool SelectWindow(WindowId id) OVERRIDE;
+ virtual bool BringSelectedWindowToFront() OVERRIDE;
// DesktopCapturer interface.
virtual void Start(Callback* callback) OVERRIDE;
virtual void Capture(const DesktopRegion& region) OVERRIDE;
+ // SharedXDisplay::XEventHandler interface.
+ virtual bool HandleXEvent(const XEvent& event) OVERRIDE;
+
private:
Display* display() { return x_display_->display(); }
@@ -146,9 +152,13 @@ WindowCapturerLinux::WindowCapturerLinux(const DesktopCaptureOptions& options)
} else {
LOG(LS_INFO) << "Xcomposite extension not available or too old.";
}
+
+ x_display_->AddEventHandler(ConfigureNotify, this);
}
-WindowCapturerLinux::~WindowCapturerLinux() {}
+WindowCapturerLinux::~WindowCapturerLinux() {
+ x_display_->RemoveEventHandler(ConfigureNotify, this);
+}
bool WindowCapturerLinux::GetWindowList(WindowList* windows) {
WindowList result;
@@ -194,6 +204,9 @@ bool WindowCapturerLinux::SelectWindow(WindowId id) {
if (!x_server_pixel_buffer_.Init(display(), id))
return false;
+ // Tell the X server to send us window resizing events.
+ XSelectInput(display(), id, StructureNotifyMask);
+
selected_window_ = id;
// In addition to needing X11 server-side support for Xcomposite, it actually
@@ -208,6 +221,55 @@ bool WindowCapturerLinux::SelectWindow(WindowId id) {
return true;
}
+bool WindowCapturerLinux::BringSelectedWindowToFront() {
+ if (!selected_window_)
+ return false;
+
+ unsigned int num_children;
+ ::Window* children;
+ ::Window parent;
+ ::Window root;
+ // Find the root window to pass event to.
+ int status = XQueryTree(
+ display(), selected_window_, &root, &parent, &children, &num_children);
+ if (status == 0) {
+ LOG(LS_ERROR) << "Failed to query for the root window.";
+ return false;
+ }
+
+ if (children)
+ XFree(children);
+
+ XRaiseWindow(display(), selected_window_);
+
+ // Some window managers (e.g., metacity in GNOME) consider it illegal to
+ // raise a window without also giving it input focus with
+ // _NET_ACTIVE_WINDOW, so XRaiseWindow() on its own isn't enough.
+ Atom atom = XInternAtom(display(), "_NET_ACTIVE_WINDOW", True);
+ if (atom != None) {
+ XEvent xev;
+ xev.xclient.type = ClientMessage;
+ xev.xclient.serial = 0;
+ xev.xclient.send_event = True;
+ xev.xclient.window = selected_window_;
+ xev.xclient.message_type = atom;
+
+ // The format member is set to 8, 16, or 32 and specifies whether the
+ // data should be viewed as a list of bytes, shorts, or longs.
+ xev.xclient.format = 32;
+
+ memset(xev.xclient.data.l, 0, sizeof(xev.xclient.data.l));
+
+ XSendEvent(display(),
+ root,
+ False,
+ SubstructureRedirectMask | SubstructureNotifyMask,
+ &xev);
+ }
+ XFlush(display());
+ return true;
+}
+
void WindowCapturerLinux::Start(Callback* callback) {
assert(!callback_);
assert(callback);
@@ -216,6 +278,14 @@ void WindowCapturerLinux::Start(Callback* callback) {
}
void WindowCapturerLinux::Capture(const DesktopRegion& region) {
+ if (!x_server_pixel_buffer_.IsWindowValid()) {
+ LOG(LS_INFO) << "The window is no longer valid.";
+ callback_->OnCaptureCompleted(NULL);
+ return;
+ }
+
+ x_display_->ProcessPendingXEvents();
+
if (!has_composite_extension_) {
// Without the Xcomposite extension we capture when the whole window is
// visible on screen and not covered by any other window. This is not
@@ -235,6 +305,20 @@ void WindowCapturerLinux::Capture(const DesktopRegion& region) {
callback_->OnCaptureCompleted(frame);
}
+bool WindowCapturerLinux::HandleXEvent(const XEvent& event) {
+ if (event.type == ConfigureNotify) {
+ XConfigureEvent xce = event.xconfigure;
+ if (!DesktopSize(xce.width, xce.height).equals(
+ x_server_pixel_buffer_.window_size())) {
+ if (!x_server_pixel_buffer_.Init(display(), selected_window_)) {
+ LOG(LS_ERROR) << "Failed to initialize pixel buffer after resizing.";
+ }
+ return true;
+ }
+ }
+ return false;
+}
+
::Window WindowCapturerLinux::GetApplicationWindow(::Window window) {
// Get WM_STATE property of the window.
XWindowProperty<uint32_t> window_state(display(), window, wm_state_atom_);
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_error_trap.h b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_error_trap.h
index fd8346928c9..aa771145d59 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_error_trap.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_error_trap.h
@@ -13,7 +13,7 @@
#include <X11/Xlib.h>
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
+#include "webrtc/base/constructormagic.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.cc b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.cc
index 6983a6dcced..be00fa7697e 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.cc
+++ b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.cc
@@ -213,6 +213,18 @@ bool XServerPixelBuffer::InitPixmaps(int depth) {
return true;
}
+bool XServerPixelBuffer::IsWindowValid() const {
+ XWindowAttributes attributes;
+ {
+ XErrorTrap error_trap(display_);
+ if (!XGetWindowAttributes(display_, window_, &attributes) ||
+ error_trap.GetLastErrorAndDisable() != 0) {
+ return false;
+ }
+ }
+ return true;
+}
+
void XServerPixelBuffer::Synchronize() {
if (shm_segment_info_ && !shm_pixmap_) {
// XShmGetImage can fail if the display is being reconfigured.
diff --git a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
index b81096c8110..98f263f3a88 100644
--- a/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
+++ b/chromium/third_party/webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h
@@ -40,6 +40,9 @@ class XServerPixelBuffer {
// Returns the size of the window the buffer was initialized for.
const DesktopSize& window_size() { return window_size_; }
+ // Returns true if the window can be found.
+ bool IsWindowValid() const;
+
// If shared memory is being used without pixmaps, synchronize this pixel
// buffer with the root window contents (otherwise, this is a no-op).
// This is to avoid doing a full-screen capture for each individual
diff --git a/chromium/third_party/webrtc/modules/interface/module_common_types.h b/chromium/third_party/webrtc/modules/interface/module_common_types.h
index 2494d68b9ff..2c947071045 100644
--- a/chromium/third_party/webrtc/modules/interface/module_common_types.h
+++ b/chromium/third_party/webrtc/modules/interface/module_common_types.h
@@ -16,8 +16,8 @@
#include <algorithm>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/common_types.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
#ifdef _WIN32
@@ -27,27 +27,6 @@
namespace webrtc {
-struct RTPHeaderExtension {
- bool hasTransmissionTimeOffset;
- int32_t transmissionTimeOffset;
- bool hasAbsoluteSendTime;
- uint32_t absoluteSendTime;
-};
-
-struct RTPHeader {
- bool markerBit;
- uint8_t payloadType;
- uint16_t sequenceNumber;
- uint32_t timestamp;
- uint32_t ssrc;
- uint8_t numCSRCs;
- uint32_t arrOfCSRCs[kRtpCsrcSize];
- uint8_t paddingLength;
- uint16_t headerLength;
- int payload_type_frequency;
- RTPHeaderExtension extension;
-};
-
struct RTPAudioHeader {
uint8_t numEnergy; // number of valid entries in arrOfEnergy
uint8_t arrOfEnergy[kRtpCsrcSize]; // one energy byte (0-9) per channel
@@ -124,6 +103,8 @@ struct WebRtcRTPHeader {
RTPHeader header;
FrameType frameType;
RTPTypeHeader type;
+ // NTP time of the capture time in local timebase in milliseconds.
+ int64_t ntp_time_ms;
};
class RTPFragmentationHeader {
@@ -686,6 +667,10 @@ class AudioFrame {
AudioFrame();
virtual ~AudioFrame() {}
+ // Resets all members to their default state (except does not modify the
+ // contents of |data_|).
+ void Reset();
+
// |interleaved_| is not changed by this method.
void UpdateFrame(int id, uint32_t timestamp, const int16_t* data,
int samples_per_channel, int sample_rate_hz,
@@ -703,13 +688,24 @@ class AudioFrame {
AudioFrame& operator-=(const AudioFrame& rhs);
int id_;
+ // RTP timestamp of the first sample in the AudioFrame.
uint32_t timestamp_;
+ // Time since the first frame in milliseconds.
+ // -1 represents an uninitialized value.
+ int64_t elapsed_time_ms_;
+ // NTP time of the estimated capture time in local timebase in milliseconds.
+ // -1 represents an uninitialized value.
+ int64_t ntp_time_ms_;
int16_t data_[kMaxDataSizeSamples];
int samples_per_channel_;
int sample_rate_hz_;
int num_channels_;
SpeechType speech_type_;
VADActivity vad_activity_;
+ // Note that there is no guarantee that |energy_| is correct. Any user of this
+ // member must verify that the value is correct.
+ // TODO(henrike) Remove |energy_|.
+ // See https://code.google.com/p/webrtc/issues/detail?id=3315.
uint32_t energy_;
bool interleaved_;
@@ -718,16 +714,25 @@ class AudioFrame {
};
inline AudioFrame::AudioFrame()
- : id_(-1),
- timestamp_(0),
- data_(),
- samples_per_channel_(0),
- sample_rate_hz_(0),
- num_channels_(1),
- speech_type_(kUndefined),
- vad_activity_(kVadUnknown),
- energy_(0xffffffff),
- interleaved_(true) {}
+ : data_() {
+ Reset();
+}
+
+inline void AudioFrame::Reset() {
+ id_ = -1;
+ // TODO(wu): Zero is a valid value for |timestamp_|. We should initialize
+ // to an invalid value, or add a new member to indicate invalidity.
+ timestamp_ = 0;
+ elapsed_time_ms_ = -1;
+ ntp_time_ms_ = -1;
+ samples_per_channel_ = 0;
+ sample_rate_hz_ = 0;
+ num_channels_ = 0;
+ speech_type_ = kUndefined;
+ vad_activity_ = kVadUnknown;
+ energy_ = 0xffffffff;
+ interleaved_ = true;
+}
inline void AudioFrame::UpdateFrame(int id, uint32_t timestamp,
const int16_t* data,
@@ -758,6 +763,8 @@ inline void AudioFrame::CopyFrom(const AudioFrame& src) {
id_ = src.id_;
timestamp_ = src.timestamp_;
+ elapsed_time_ms_ = src.elapsed_time_ms_;
+ ntp_time_ms_ = src.ntp_time_ms_;
samples_per_channel_ = src.samples_per_channel_;
sample_rate_hz_ = src.sample_rate_hz_;
speech_type_ = src.speech_type_;
diff --git a/chromium/third_party/webrtc/modules/media_file/source/OWNERS b/chromium/third_party/webrtc/modules/media_file/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/media_file/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/media_file/source/avi_file.cc b/chromium/third_party/webrtc/modules/media_file/source/avi_file.cc
index 92c51acce60..19baaa3b218 100644
--- a/chromium/third_party/webrtc/modules/media_file/source/avi_file.cc
+++ b/chromium/third_party/webrtc/modules/media_file/source/avi_file.cc
@@ -23,7 +23,6 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/file_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
// http://msdn2.microsoft.com/en-us/library/ms779636.aspx
@@ -178,8 +177,7 @@ AviFile::AviFile()
_videoCodecConfigParamsLength(0),
_videoStreamDataChunkPrefix(0),
_audioStreamDataChunkPrefix(0),
- _created(false),
- _indexList(new ListWrapper())
+ _created(false)
{
ResetComplexMembers();
}
@@ -188,7 +186,6 @@ AviFile::~AviFile()
{
Close();
- delete _indexList;
delete[] _videoCodecConfigParams;
delete _crit;
}
@@ -1712,21 +1709,11 @@ uint32_t AviFile::StreamAndTwoCharCodeToTag(int32_t streamNum,
void AviFile::ClearIndexList()
{
- while (!_indexList->Empty())
- {
- ListItem* listItem = _indexList->First();
- if (listItem == 0)
- {
- break;
- }
-
- AVIINDEXENTRY* item = static_cast<AVIINDEXENTRY*>(listItem->GetItem());
- if (item != NULL)
- {
- delete item;
- }
- _indexList->PopFront();
- }
+ for (IndexList::iterator iter = _indexList.begin();
+ iter != _indexList.end(); ++iter) {
+ delete *iter;
+ }
+ _indexList.clear();
}
void AviFile::AddChunkToIndexList(uint32_t inChunkId,
@@ -1734,7 +1721,7 @@ void AviFile::AddChunkToIndexList(uint32_t inChunkId,
uint32_t inOffset,
uint32_t inSize)
{
- _indexList->PushBack(new AVIINDEXENTRY(inChunkId, inFlags, inOffset,
+ _indexList.push_back(new AVIINDEXENTRY(inChunkId, inFlags, inOffset,
inSize));
}
@@ -1747,19 +1734,13 @@ void AviFile::WriteIndex()
_bytesWritten += PutLE32(0);
const size_t idxChunkSize = _bytesWritten;
- for (ListItem* listItem = _indexList->First();
- listItem != NULL;
- listItem = _indexList->Next(listItem))
- {
- const AVIINDEXENTRY* item =
- static_cast<AVIINDEXENTRY*>(listItem->GetItem());
- if (item != NULL)
- {
- _bytesWritten += PutLE32(item->ckid);
- _bytesWritten += PutLE32(item->dwFlags);
- _bytesWritten += PutLE32(item->dwChunkOffset);
- _bytesWritten += PutLE32(item->dwChunkLength);
- }
+ for (IndexList::iterator iter = _indexList.begin();
+ iter != _indexList.end(); ++iter) {
+ const AVIINDEXENTRY* item = *iter;
+ _bytesWritten += PutLE32(item->ckid);
+ _bytesWritten += PutLE32(item->dwFlags);
+ _bytesWritten += PutLE32(item->dwChunkOffset);
+ _bytesWritten += PutLE32(item->dwChunkLength);
}
PutLE32LengthFromCurrent(static_cast<long>(idxChunkSize));
}
diff --git a/chromium/third_party/webrtc/modules/media_file/source/avi_file.h b/chromium/third_party/webrtc/modules/media_file/source/avi_file.h
index a55fc187481..d8b10626dff 100644
--- a/chromium/third_party/webrtc/modules/media_file/source/avi_file.h
+++ b/chromium/third_party/webrtc/modules/media_file/source/avi_file.h
@@ -14,12 +14,12 @@
#define WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
#include <stdio.h>
+#include <list>
#include "webrtc/typedefs.h"
namespace webrtc {
class CriticalSectionWrapper;
-class ListWrapper;
struct AVISTREAMHEADER
{
@@ -194,6 +194,7 @@ private:
void WriteIndex();
private:
+ typedef std::list<AVIINDEXENTRY*> IndexList;
struct AVIMAINHEADER
{
AVIMAINHEADER();
@@ -269,7 +270,7 @@ private:
uint32_t _audioStreamDataChunkPrefix;
bool _created;
- ListWrapper* _indexList; // Elements are of type AVIINDEXENTRY.
+ IndexList _indexList;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/media_file/source/media_file_utility.cc b/chromium/third_party/webrtc/modules/media_file/source/media_file_utility.cc
index 85df0b3a40a..e8883c9a4ba 100644
--- a/chromium/third_party/webrtc/modules/media_file/source/media_file_utility.cc
+++ b/chromium/third_party/webrtc/modules/media_file/source/media_file_utility.cc
@@ -2521,6 +2521,7 @@ int32_t ModuleFileUtility::FileDurationMs(const char* fileName,
break;
}
#endif
+ break;
}
case kFileFormatPreencodedFile:
{
diff --git a/chromium/third_party/webrtc/modules/modules.gyp b/chromium/third_party/webrtc/modules/modules.gyp
index ef354abc0fe..8dec125b093 100644
--- a/chromium/third_party/webrtc/modules/modules.gyp
+++ b/chromium/third_party/webrtc/modules/modules.gyp
@@ -16,9 +16,8 @@
'audio_coding/codecs/isac/main/source/isac.gypi',
'audio_coding/codecs/isac/fix/source/isacfix.gypi',
'audio_coding/codecs/pcm16b/pcm16b.gypi',
- 'audio_coding/main/source/audio_coding_module.gypi',
+ 'audio_coding/main/acm2/audio_coding_module.gypi',
'audio_coding/neteq/neteq.gypi',
- 'audio_coding/neteq4/neteq.gypi',
'audio_conference_mixer/source/audio_conference_mixer.gypi',
'audio_device/audio_device.gypi',
'audio_processing/audio_processing.gypi',
@@ -43,6 +42,7 @@
'includes': [
'audio_coding/codecs/isac/isac_test.gypi',
'audio_coding/codecs/isac/isacfix_test.gypi',
+ 'audio_coding/codecs/tools/audio_codec_speed_tests.gypi',
'audio_processing/audio_processing_tests.gypi',
'rtp_rtcp/test/testFec/test_fec.gypi',
'video_coding/main/source/video_coding_test.gypi',
@@ -70,15 +70,13 @@
'dependencies': [
'audio_coding_module',
'audio_processing',
- 'audioproc_unittest_proto',
'bitrate_controller',
'CNG',
'desktop_capture',
'iSACFix',
'media_file',
- 'NetEq',
- 'NetEq4',
- 'NetEq4TestTools',
+ 'neteq',
+ 'neteq_test_tools',
'neteq_unittest_tools',
'paced_sender',
'PCM16B', # Needed by NetEq tests.
@@ -99,13 +97,14 @@
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/test/test.gyp:test_support_main',
'<(webrtc_root)/test/test.gyp:frame_generator',
+ '<(webrtc_root)/test/test.gyp:rtcp_packet_parser',
],
'sources': [
'audio_coding/main/acm2/acm_receiver_unittest.cc',
+ 'audio_coding/main/acm2/audio_coding_module_unittest.cc',
'audio_coding/main/acm2/call_statistics_unittest.cc',
'audio_coding/main/acm2/initial_delay_manager_unittest.cc',
'audio_coding/main/acm2/nack_unittest.cc',
- 'audio_coding/main/source/acm_neteq_unittest.cc',
'audio_coding/codecs/cng/cng_unittest.cc',
'audio_coding/codecs/isac/fix/source/filters_unittest.cc',
'audio_coding/codecs/isac/fix/source/filterbanks_unittest.cc',
@@ -113,47 +112,48 @@
'audio_coding/codecs/isac/fix/source/transform_unittest.cc',
'audio_coding/codecs/isac/main/source/isac_unittest.cc',
'audio_coding/codecs/opus/opus_unittest.cc',
- 'audio_coding/neteq4/audio_multi_vector_unittest.cc',
- 'audio_coding/neteq4/audio_vector_unittest.cc',
- 'audio_coding/neteq4/background_noise_unittest.cc',
- 'audio_coding/neteq4/buffer_level_filter_unittest.cc',
- 'audio_coding/neteq4/comfort_noise_unittest.cc',
- 'audio_coding/neteq4/decision_logic_unittest.cc',
- 'audio_coding/neteq4/decoder_database_unittest.cc',
- 'audio_coding/neteq4/delay_manager_unittest.cc',
- 'audio_coding/neteq4/delay_peak_detector_unittest.cc',
- 'audio_coding/neteq4/dsp_helper_unittest.cc',
- 'audio_coding/neteq4/dtmf_buffer_unittest.cc',
- 'audio_coding/neteq4/dtmf_tone_generator_unittest.cc',
- 'audio_coding/neteq4/expand_unittest.cc',
- 'audio_coding/neteq4/merge_unittest.cc',
- 'audio_coding/neteq4/neteq_external_decoder_unittest.cc',
- 'audio_coding/neteq4/neteq_impl_unittest.cc',
- 'audio_coding/neteq4/neteq_stereo_unittest.cc',
- 'audio_coding/neteq4/neteq_unittest.cc',
- 'audio_coding/neteq4/normal_unittest.cc',
- 'audio_coding/neteq4/packet_buffer_unittest.cc',
- 'audio_coding/neteq4/payload_splitter_unittest.cc',
- 'audio_coding/neteq4/post_decode_vad_unittest.cc',
- 'audio_coding/neteq4/random_vector_unittest.cc',
- 'audio_coding/neteq4/sync_buffer_unittest.cc',
- 'audio_coding/neteq4/timestamp_scaler_unittest.cc',
- 'audio_coding/neteq4/time_stretch_unittest.cc',
- 'audio_coding/neteq4/mock/mock_audio_decoder.h',
- 'audio_coding/neteq4/mock/mock_audio_vector.h',
- 'audio_coding/neteq4/mock/mock_buffer_level_filter.h',
- 'audio_coding/neteq4/mock/mock_decoder_database.h',
- 'audio_coding/neteq4/mock/mock_delay_manager.h',
- 'audio_coding/neteq4/mock/mock_delay_peak_detector.h',
- 'audio_coding/neteq4/mock/mock_dtmf_buffer.h',
- 'audio_coding/neteq4/mock/mock_dtmf_tone_generator.h',
- 'audio_coding/neteq4/mock/mock_external_decoder_pcm16b.h',
- 'audio_coding/neteq4/mock/mock_packet_buffer.h',
- 'audio_coding/neteq4/mock/mock_payload_splitter.h',
+ 'audio_coding/neteq/audio_classifier_unittest.cc',
+ 'audio_coding/neteq/audio_multi_vector_unittest.cc',
+ 'audio_coding/neteq/audio_vector_unittest.cc',
+ 'audio_coding/neteq/background_noise_unittest.cc',
+ 'audio_coding/neteq/buffer_level_filter_unittest.cc',
+ 'audio_coding/neteq/comfort_noise_unittest.cc',
+ 'audio_coding/neteq/decision_logic_unittest.cc',
+ 'audio_coding/neteq/decoder_database_unittest.cc',
+ 'audio_coding/neteq/delay_manager_unittest.cc',
+ 'audio_coding/neteq/delay_peak_detector_unittest.cc',
+ 'audio_coding/neteq/dsp_helper_unittest.cc',
+ 'audio_coding/neteq/dtmf_buffer_unittest.cc',
+ 'audio_coding/neteq/dtmf_tone_generator_unittest.cc',
+ 'audio_coding/neteq/expand_unittest.cc',
+ 'audio_coding/neteq/merge_unittest.cc',
+ 'audio_coding/neteq/neteq_external_decoder_unittest.cc',
+ 'audio_coding/neteq/neteq_impl_unittest.cc',
+ 'audio_coding/neteq/neteq_stereo_unittest.cc',
+ 'audio_coding/neteq/neteq_unittest.cc',
+ 'audio_coding/neteq/normal_unittest.cc',
+ 'audio_coding/neteq/packet_buffer_unittest.cc',
+ 'audio_coding/neteq/payload_splitter_unittest.cc',
+ 'audio_coding/neteq/post_decode_vad_unittest.cc',
+ 'audio_coding/neteq/random_vector_unittest.cc',
+ 'audio_coding/neteq/sync_buffer_unittest.cc',
+ 'audio_coding/neteq/timestamp_scaler_unittest.cc',
+ 'audio_coding/neteq/time_stretch_unittest.cc',
+ 'audio_coding/neteq/mock/mock_audio_decoder.h',
+ 'audio_coding/neteq/mock/mock_audio_vector.h',
+ 'audio_coding/neteq/mock/mock_buffer_level_filter.h',
+ 'audio_coding/neteq/mock/mock_decoder_database.h',
+ 'audio_coding/neteq/mock/mock_delay_manager.h',
+ 'audio_coding/neteq/mock/mock_delay_peak_detector.h',
+ 'audio_coding/neteq/mock/mock_dtmf_buffer.h',
+ 'audio_coding/neteq/mock/mock_dtmf_tone_generator.h',
+ 'audio_coding/neteq/mock/mock_external_decoder_pcm16b.h',
+ 'audio_coding/neteq/mock/mock_packet_buffer.h',
+ 'audio_coding/neteq/mock/mock_payload_splitter.h',
+ 'audio_coding/neteq/tools/packet_unittest.cc',
'audio_processing/aec/system_delay_unittest.cc',
'audio_processing/aec/echo_cancellation_unittest.cc',
'audio_processing/echo_cancellation_impl_unittest.cc',
- 'audio_processing/test/audio_processing_unittest.cc',
'audio_processing/utility/delay_estimator_unittest.cc',
'audio_processing/utility/ring_buffer_unittest.cc',
'bitrate_controller/bitrate_controller_unittest.cc',
@@ -167,19 +167,19 @@
'desktop_capture/screen_capturer_mock_objects.h',
'desktop_capture/screen_capturer_unittest.cc',
'desktop_capture/window_capturer_unittest.cc',
- "desktop_capture/win/cursor_unittest.cc",
- "desktop_capture/win/cursor_unittest_resources.h",
- "desktop_capture/win/cursor_unittest_resources.rc",
+ 'desktop_capture/win/cursor_unittest.cc',
+ 'desktop_capture/win/cursor_unittest_resources.h',
+ 'desktop_capture/win/cursor_unittest_resources.rc',
'media_file/source/media_file_unittest.cc',
'module_common_types_unittest.cc',
'pacing/paced_sender_unittest.cc',
+ 'remote_bitrate_estimator/bwe_simulations.cc',
'remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h',
'remote_bitrate_estimator/rate_statistics_unittest.cc',
'remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc',
'remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc',
'remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h',
'remote_bitrate_estimator/remote_bitrate_estimators_test.cc',
- 'remote_bitrate_estimator/rtp_to_ntp_unittest.cc',
'remote_bitrate_estimator/test/bwe_test_baselinefile.cc',
'remote_bitrate_estimator/test/bwe_test_baselinefile.h',
'remote_bitrate_estimator/test/bwe_test_fileutils.cc',
@@ -199,9 +199,11 @@
'rtp_rtcp/source/nack_rtx_unittest.cc',
'rtp_rtcp/source/producer_fec_unittest.cc',
'rtp_rtcp/source/receive_statistics_unittest.cc',
+ 'rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc',
'rtp_rtcp/source/rtcp_format_remb_unittest.cc',
- 'rtp_rtcp/source/rtcp_sender_unittest.cc',
+ 'rtp_rtcp/source/rtcp_packet_unittest.cc',
'rtp_rtcp/source/rtcp_receiver_unittest.cc',
+ 'rtp_rtcp/source/rtcp_sender_unittest.cc',
'rtp_rtcp/source/rtp_fec_unittest.cc',
'rtp_rtcp/source/rtp_format_vp8_unittest.cc',
'rtp_rtcp/source/rtp_format_vp8_test_helper.cc',
@@ -219,6 +221,7 @@
'rtp_rtcp/test/testAPI/test_api_rtcp.cc',
'rtp_rtcp/test/testAPI/test_api_video.cc',
'utility/source/audio_frame_operations_unittest.cc',
+ 'utility/source/file_player_unittests.cc',
'video_coding/codecs/test/packet_manipulator_unittest.cc',
'video_coding/codecs/test/stats_unittest.cc',
'video_coding/codecs/test/videoprocessor_unittest.cc',
@@ -278,6 +281,14 @@
}],
['enable_protobuf==1', {
'defines': [ 'WEBRTC_AUDIOPROC_DEBUG_DUMP' ],
+ 'dependencies': [
+ 'audioproc_unittest_proto',
+ ],
+ 'sources': [
+ 'audio_processing/audio_processing_impl_unittest.cc',
+ 'audio_processing/test/audio_processing_unittest.cc',
+ 'audio_processing/test/test_utils.h',
+ ],
}],
['build_libvpx==1', {
'dependencies': [
@@ -286,7 +297,7 @@
}],
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
@@ -326,13 +337,14 @@
'audio_coding/main/test/EncodeDecodeTest.cc',
'audio_coding/main/test/iSACTest.cc',
'audio_coding/main/test/opus_test.cc',
+ 'audio_coding/main/test/PacketLossTest.cc',
'audio_coding/main/test/PCMFile.cc',
'audio_coding/main/test/RTPFile.cc',
'audio_coding/main/test/SpatialAudio.cc',
'audio_coding/main/test/TestAllCodecs.cc',
'audio_coding/main/test/target_delay_unittest.cc',
'audio_coding/main/test/Tester.cc',
- 'audio_coding/main/test/TestFEC.cc',
+ 'audio_coding/main/test/TestRedFec.cc',
'audio_coding/main/test/TestStereo.cc',
'audio_coding/main/test/TestVADDTX.cc',
'audio_coding/main/test/TimedTrace.cc',
@@ -346,7 +358,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are
# using Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
@@ -357,7 +369,7 @@
'conditions': [
# TODO(henrike): remove build_with_chromium==1 when the bots are using
# Chromium's buildbots.
- ['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
+ ['build_with_chromium==1 and OS=="android"', {
'targets': [
{
'target_name': 'modules_unittests_apk_target',
diff --git a/chromium/third_party/webrtc/modules/modules_tests.isolate b/chromium/third_party/webrtc/modules/modules_tests.isolate
index 7a051f66eb6..e5055f0d91e 100644
--- a/chromium/third_party/webrtc/modules/modules_tests.isolate
+++ b/chromium/third_party/webrtc/modules/modules_tests.isolate
@@ -8,32 +8,30 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/modules_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../DEPS',
- '../../resources/audio_coding/testfile32kHz.pcm',
- '../../resources/audio_coding/teststereo32kHz.pcm',
- '../../resources/foreman_cif.yuv',
- '../../resources/paris_qcif.yuv',
- '../../testing/test_env.py',
+ '<(DEPTH)/DEPS',
+ '<(DEPTH)/resources/audio_coding/testfile32kHz.pcm',
+ '<(DEPTH)/resources/audio_coding/teststereo32kHz.pcm',
+ '<(DEPTH)/resources/foreman_cif.yuv',
+ '<(DEPTH)/resources/paris_qcif.yuv',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/modules_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/modules/modules_unittests.isolate b/chromium/third_party/webrtc/modules/modules_unittests.isolate
index aa91238caaf..09ace1c1816 100644
--- a/chromium/third_party/webrtc/modules/modules_unittests.isolate
+++ b/chromium/third_party/webrtc/modules/modules_unittests.isolate
@@ -8,77 +8,105 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../data/',
- '../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
+ ],
+ 'isolate_dependency_tracked': [
+ '<(DEPTH)/resources/short_mixed_mono_48.dat',
+ '<(DEPTH)/resources/short_mixed_mono_48.pcm',
+ '<(DEPTH)/resources/short_mixed_stereo_48.dat',
+ '<(DEPTH)/resources/short_mixed_stereo_48.pcm',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/modules_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../DEPS',
- '../../data/audio_processing/output_data_float.pb',
- '../../data/voice_engine/audio_tiny48.wav',
- '../../resources/audio_coding/neteq4_network_stats.dat',
- '../../resources/audio_coding/neteq4_rtcp_stats.dat',
- '../../resources/audio_coding/neteq4_universal_ref.pcm',
- '../../resources/audio_coding/neteq_network_stats.dat',
- '../../resources/audio_coding/neteq_rtcp_stats.dat',
- '../../resources/audio_coding/neteq_universal_new.rtp',
- '../../resources/audio_coding/neteq_universal_ref.pcm',
- '../../resources/audio_coding/testfile32kHz.pcm',
- '../../resources/deflicker_before_cif_short.yuv',
- '../../resources/far16_stereo.pcm',
- '../../resources/far32_stereo.pcm',
- '../../resources/far8_stereo.pcm',
- '../../resources/foremanColorEnhanced_cif_short.yuv',
- '../../resources/foreman_cif.yuv',
- '../../resources/foreman_cif_short.yuv',
- '../../resources/near16_stereo.pcm',
- '../../resources/near32_stereo.pcm',
- '../../resources/near8_stereo.pcm',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_TOF.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_AST.bin',
- '../../resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_TOF.bin',
- '../../resources/video_coding/frame-ethernet-ii.pcap',
- '../../resources/video_coding/frame-loopback.pcap',
- '../../resources/video_coding/pltype103.rtp',
- '../../resources/video_coding/ssrcs-2.pcap',
- '../../resources/video_coding/ssrcs-3.pcap',
- '../../testing/test_env.py',
+ '<(DEPTH)/DEPS',
+ '<(DEPTH)/data/audio_processing/output_data_float.pb',
+ '<(DEPTH)/data/voice_engine/audio_tiny48.wav',
+ '<(DEPTH)/resources/att-downlink.rx',
+ '<(DEPTH)/resources/att-uplink.rx',
+ '<(DEPTH)/resources/audio_coding/neteq4_network_stats.dat',
+ '<(DEPTH)/resources/audio_coding/neteq4_rtcp_stats.dat',
+ '<(DEPTH)/resources/audio_coding/neteq4_universal_ref.pcm',
+ '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_32.pcm',
+ '<(DEPTH)/resources/audio_coding/neteq4_universal_ref_win_64.pcm',
+ '<(DEPTH)/resources/audio_coding/neteq_network_stats.dat',
+ '<(DEPTH)/resources/audio_coding/neteq_rtcp_stats.dat',
+ '<(DEPTH)/resources/audio_coding/neteq_universal_new.rtp',
+ '<(DEPTH)/resources/audio_coding/neteq_universal_ref.pcm',
+ '<(DEPTH)/resources/audio_coding/testfile32kHz.pcm',
+ '<(DEPTH)/resources/deflicker_before_cif_short.yuv',
+ '<(DEPTH)/resources/far16_stereo.pcm',
+ '<(DEPTH)/resources/far32_stereo.pcm',
+ '<(DEPTH)/resources/far44_stereo.pcm',
+ '<(DEPTH)/resources/far48_stereo.pcm',
+ '<(DEPTH)/resources/far8_stereo.pcm',
+ '<(DEPTH)/resources/foremanColorEnhanced_cif_short.yuv',
+ '<(DEPTH)/resources/foreman_cif.yuv',
+ '<(DEPTH)/resources/foreman_cif_short.yuv',
+ '<(DEPTH)/resources/near16_stereo.pcm',
+ '<(DEPTH)/resources/near32_stereo.pcm',
+ '<(DEPTH)/resources/near44_stereo.pcm',
+ '<(DEPTH)/resources/near48_stereo.pcm',
+ '<(DEPTH)/resources/near8_stereo.pcm',
+ '<(DEPTH)/resources/ref03.aecdump',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke1_1_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingChoke2_1_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingDelay1_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_IncreasingLoss1_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_Multi1_1_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyChoke_1_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyDelay_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_SteadyLoss_0_TOF.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_AST.bin',
+ '<(DEPTH)/resources/remote_bitrate_estimator/VideoSendersTest_BweTest_UnlimitedSpeed_0_TOF.bin',
+ '<(DEPTH)/resources/short_mixed_mono_48.dat',
+ '<(DEPTH)/resources/short_mixed_mono_48.pcm',
+ '<(DEPTH)/resources/short_mixed_stereo_48.dat',
+ '<(DEPTH)/resources/short_mixed_stereo_48.pcm',
+ '<(DEPTH)/resources/sprint-downlink.rx',
+ '<(DEPTH)/resources/sprint-uplink.rx',
+ '<(DEPTH)/resources/synthetic-trace.rx',
+ '<(DEPTH)/resources/tmobile-downlink.rx',
+ '<(DEPTH)/resources/tmobile-uplink.rx',
+ '<(DEPTH)/resources/utility/encapsulated_pcm16b_8khz.wav',
+ '<(DEPTH)/resources/utility/encapsulated_pcmu_8khz.wav',
+ '<(DEPTH)/resources/verizon3g-downlink.rx',
+ '<(DEPTH)/resources/verizon3g-uplink.rx',
+ '<(DEPTH)/resources/verizon4g-downlink.rx',
+ '<(DEPTH)/resources/verizon4g-uplink.rx',
+ '<(DEPTH)/resources/video_coding/frame-ethernet-ii.pcap',
+ '<(DEPTH)/resources/video_coding/frame-loopback.pcap',
+ '<(DEPTH)/resources/video_coding/pltype103.rtp',
+ '<(DEPTH)/resources/video_coding/ssrcs-2.pcap',
+ '<(DEPTH)/resources/video_coding/ssrcs-3.pcap',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/modules_unittests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/modules/pacing/OWNERS b/chromium/third_party/webrtc/modules/pacing/OWNERS
index 933a045009b..1426abc6215 100644
--- a/chromium/third_party/webrtc/modules/pacing/OWNERS
+++ b/chromium/third_party/webrtc/modules/pacing/OWNERS
@@ -2,3 +2,8 @@ pwestin@webrtc.org
stefan@webrtc.org
mflodman@webrtc.org
asapersson@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/pacing/include/paced_sender.h b/chromium/third_party/webrtc/modules/pacing/include/paced_sender.h
index 045469009f0..95f1a86e8f6 100644
--- a/chromium/third_party/webrtc/modules/pacing/include/paced_sender.h
+++ b/chromium/third_party/webrtc/modules/pacing/include/paced_sender.h
@@ -49,14 +49,14 @@ class PacedSender : public Module {
bool retransmission) = 0;
// Called when it's a good time to send a padding data.
virtual int TimeToSendPadding(int bytes) = 0;
+
protected:
virtual ~Callback() {}
};
static const int kDefaultMaxQueueLengthMs = 2000;
- PacedSender(Callback* callback, int target_bitrate_kbps,
- float pace_multiplier);
+ PacedSender(Callback* callback, int max_bitrate_kbps, int min_bitrate_kbps);
virtual ~PacedSender();
@@ -71,13 +71,9 @@ class PacedSender : public Module {
// Resume sending packets.
void Resume();
- // Set the pacing target bitrate and the bitrate up to which we are allowed to
- // pad. We will send padding packets to increase the total bitrate until we
- // reach |pad_up_to_bitrate_kbps|. If the media bitrate is above
- // |pad_up_to_bitrate_kbps| no padding will be sent.
- void UpdateBitrate(int target_bitrate_kbps,
- int max_padding_bitrate_kbps,
- int pad_up_to_bitrate_kbps);
+ // Set target bitrates for the pacer. Padding packets will be utilized to
+ // reach |min_bitrate| unless enough media packets are available.
+ void UpdateBitrate(int max_bitrate_kbps, int min_bitrate_kbps);
// Returns true if we send the packet now, else it will add the packet
// information to the queue and call TimeToSendPacket when it's time to send.
@@ -119,7 +115,6 @@ class PacedSender : public Module {
void UpdateMediaBytesSent(int num_bytes);
Callback* callback_;
- const float pace_multiplier_;
bool enabled_;
bool paused_;
int max_queue_length_ms_;
@@ -128,12 +123,9 @@ class PacedSender : public Module {
// we can pace out during the current interval.
scoped_ptr<paced_sender::IntervalBudget> media_budget_;
// This is the padding budget, keeping track of how many bits of padding we're
- // allowed to send out during the current interval.
+ // allowed to send out during the current interval. This budget will be
+ // utilized when there's no media to send.
scoped_ptr<paced_sender::IntervalBudget> padding_budget_;
- // Media and padding share this budget, therefore no padding will be sent if
- // media uses all of this budget. This is used to avoid padding above a given
- // bitrate.
- scoped_ptr<paced_sender::IntervalBudget> pad_up_to_bitrate_budget_;
TickTime time_last_update_;
TickTime time_last_send_;
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender.cc b/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
index c46bd04ed73..e9f9bddced1 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender.cc
@@ -120,19 +120,16 @@ class IntervalBudget {
};
} // namespace paced_sender
-PacedSender::PacedSender(Callback* callback, int target_bitrate_kbps,
- float pace_multiplier)
+PacedSender::PacedSender(Callback* callback,
+ int max_bitrate_kbps,
+ int min_bitrate_kbps)
: callback_(callback),
- pace_multiplier_(pace_multiplier),
- enabled_(false),
+ enabled_(true),
paused_(false),
max_queue_length_ms_(kDefaultMaxQueueLengthMs),
critsect_(CriticalSectionWrapper::CreateCriticalSection()),
- media_budget_(new paced_sender::IntervalBudget(
- pace_multiplier_ * target_bitrate_kbps)),
- padding_budget_(new paced_sender::IntervalBudget(0)),
- // No padding until UpdateBitrate is called.
- pad_up_to_bitrate_budget_(new paced_sender::IntervalBudget(0)),
+ media_budget_(new paced_sender::IntervalBudget(max_bitrate_kbps)),
+ padding_budget_(new paced_sender::IntervalBudget(min_bitrate_kbps)),
time_last_update_(TickTime::Now()),
capture_time_ms_last_queued_(0),
capture_time_ms_last_sent_(0),
@@ -165,13 +162,11 @@ bool PacedSender::Enabled() const {
return enabled_;
}
-void PacedSender::UpdateBitrate(int target_bitrate_kbps,
- int max_padding_bitrate_kbps,
- int pad_up_to_bitrate_kbps) {
+void PacedSender::UpdateBitrate(int max_bitrate_kbps,
+ int min_bitrate_kbps) {
CriticalSectionScoped cs(critsect_.get());
- media_budget_->set_target_rate_kbps(pace_multiplier_ * target_bitrate_kbps);
- padding_budget_->set_target_rate_kbps(max_padding_bitrate_kbps);
- pad_up_to_bitrate_budget_->set_target_rate_kbps(pad_up_to_bitrate_kbps);
+ media_budget_->set_target_rate_kbps(max_bitrate_kbps);
+ padding_budget_->set_target_rate_kbps(min_bitrate_kbps);
}
bool PacedSender::SendPacket(Priority priority, uint32_t ssrc,
@@ -273,24 +268,21 @@ int32_t PacedSender::Process() {
if (high_priority_packets_->empty() &&
normal_priority_packets_->empty() &&
low_priority_packets_->empty() &&
- padding_budget_->bytes_remaining() > 0 &&
- pad_up_to_bitrate_budget_->bytes_remaining() > 0) {
- int padding_needed = std::min(
- padding_budget_->bytes_remaining(),
- pad_up_to_bitrate_budget_->bytes_remaining());
+ padding_budget_->bytes_remaining() > 0) {
+ int padding_needed = padding_budget_->bytes_remaining();
critsect_->Leave();
int bytes_sent = callback_->TimeToSendPadding(padding_needed);
critsect_->Enter();
media_budget_->UseBudget(bytes_sent);
padding_budget_->UseBudget(bytes_sent);
- pad_up_to_bitrate_budget_->UseBudget(bytes_sent);
}
}
return 0;
}
// MUST have critsect_ when calling.
-bool PacedSender::SendPacketFromList(paced_sender::PacketList* packet_list) {
+bool PacedSender::SendPacketFromList(paced_sender::PacketList* packet_list)
+ EXCLUSIVE_LOCKS_REQUIRED(critsect_.get()) {
paced_sender::Packet packet = GetNextPacketFromList(packet_list);
critsect_->Leave();
@@ -323,7 +315,6 @@ bool PacedSender::SendPacketFromList(paced_sender::PacketList* packet_list) {
void PacedSender::UpdateBytesPerInterval(uint32_t delta_time_ms) {
media_budget_->IncreaseBudget(delta_time_ms);
padding_budget_->IncreaseBudget(delta_time_ms);
- pad_up_to_bitrate_budget_->IncreaseBudget(delta_time_ms);
}
// MUST have critsect_ when calling.
@@ -387,7 +378,7 @@ paced_sender::Packet PacedSender::GetNextPacketFromList(
void PacedSender::UpdateMediaBytesSent(int num_bytes) {
time_last_send_ = TickTime::Now();
media_budget_->UseBudget(num_bytes);
- pad_up_to_bitrate_budget_->UseBudget(num_bytes);
+ padding_budget_->UseBudget(num_bytes);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc b/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
index f8dcdfc6985..9763c80070d 100644
--- a/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/pacing/paced_sender_unittest.cc
@@ -59,9 +59,8 @@ class PacedSenderTest : public ::testing::Test {
srand(0);
TickTime::UseFakeClock(123456);
// Need to initialize PacedSender after we initialize clock.
- send_bucket_.reset(new PacedSender(&callback_, kTargetBitrate,
- kPaceMultiplier));
- send_bucket_->SetStatus(true);
+ send_bucket_.reset(
+ new PacedSender(&callback_, kPaceMultiplier * kTargetBitrate, 0));
}
void SendAndExpectPacket(PacedSender::Priority priority,
@@ -209,7 +208,7 @@ TEST_F(PacedSenderTest, Padding) {
uint32_t ssrc = 12345;
uint16_t sequence_number = 1234;
- send_bucket_->UpdateBitrate(kTargetBitrate, kTargetBitrate, kTargetBitrate);
+ send_bucket_->UpdateBitrate(kPaceMultiplier * kTargetBitrate, kTargetBitrate);
// Due to the multiplicative factor we can send 3 packets not 2 packets.
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
TickTime::MillisecondTimestamp(), 250, false);
@@ -235,7 +234,7 @@ TEST_F(PacedSenderTest, Padding) {
TEST_F(PacedSenderTest, NoPaddingWhenDisabled) {
send_bucket_->SetStatus(false);
- send_bucket_->UpdateBitrate(kTargetBitrate, kTargetBitrate, kTargetBitrate);
+ send_bucket_->UpdateBitrate(kPaceMultiplier * kTargetBitrate, kTargetBitrate);
// No padding is expected since the pacer is disabled.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
@@ -255,7 +254,7 @@ TEST_F(PacedSenderTest, VerifyPaddingUpToBitrate) {
int64_t capture_time_ms = 56789;
const int kTimeStep = 5;
const int64_t kBitrateWindow = 100;
- send_bucket_->UpdateBitrate(kTargetBitrate, kTargetBitrate, kTargetBitrate);
+ send_bucket_->UpdateBitrate(kPaceMultiplier * kTargetBitrate, kTargetBitrate);
int64_t start_time = TickTime::MillisecondTimestamp();
while (TickTime::MillisecondTimestamp() - start_time < kBitrateWindow) {
SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
@@ -267,27 +266,6 @@ TEST_F(PacedSenderTest, VerifyPaddingUpToBitrate) {
}
}
-TEST_F(PacedSenderTest, VerifyMaxPaddingBitrate) {
- uint32_t ssrc = 12345;
- uint16_t sequence_number = 1234;
- int64_t capture_time_ms = 56789;
- const int kTimeStep = 5;
- const int64_t kBitrateWindow = 100;
- const int kTargetBitrate = 1500;
- const int kMaxPaddingBitrate = 800;
- send_bucket_->UpdateBitrate(kTargetBitrate, kMaxPaddingBitrate,
- kTargetBitrate);
- int64_t start_time = TickTime::MillisecondTimestamp();
- while (TickTime::MillisecondTimestamp() - start_time < kBitrateWindow) {
- SendAndExpectPacket(PacedSender::kNormalPriority, ssrc, sequence_number++,
- capture_time_ms, 250, false);
- TickTime::AdvanceFakeClock(kTimeStep);
- EXPECT_CALL(callback_, TimeToSendPadding(500)).Times(1).
- WillOnce(Return(250));
- send_bucket_->Process();
- }
-}
-
TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) {
uint32_t ssrc = 12345;
uint16_t sequence_number = 1234;
@@ -295,10 +273,9 @@ TEST_F(PacedSenderTest, VerifyAverageBitrateVaryingMediaPayload) {
const int kTimeStep = 5;
const int64_t kBitrateWindow = 10000;
PacedSenderPadding callback;
- send_bucket_.reset(new PacedSender(&callback, kTargetBitrate,
- kPaceMultiplier));
- send_bucket_->SetStatus(true);
- send_bucket_->UpdateBitrate(kTargetBitrate, kTargetBitrate, kTargetBitrate);
+ send_bucket_.reset(
+ new PacedSender(&callback, kPaceMultiplier * kTargetBitrate, 0));
+ send_bucket_->UpdateBitrate(kPaceMultiplier * kTargetBitrate, kTargetBitrate);
int64_t start_time = TickTime::MillisecondTimestamp();
int media_bytes = 0;
while (TickTime::MillisecondTimestamp() - start_time < kBitrateWindow) {
@@ -421,7 +398,8 @@ TEST_F(PacedSenderTest, Pause) {
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
- EXPECT_CALL(callback_, TimeToSendPacket(_, _, second_capture_time_ms, false))
+ EXPECT_CALL(
+ callback_, TimeToSendPacket(_, _, second_capture_time_ms, false))
.Times(1)
.WillRepeatedly(Return(true));
EXPECT_EQ(5, send_bucket_->TimeUntilNextProcess());
@@ -496,7 +474,7 @@ TEST_F(PacedSenderTest, MaxQueueLength) {
uint16_t sequence_number = 1234;
EXPECT_EQ(0, send_bucket_->QueueInMs());
- send_bucket_->UpdateBitrate(30, 0, 0);
+ send_bucket_->UpdateBitrate(kPaceMultiplier * 30, 0);
for (int i = 0; i < 30; ++i) {
SendAndExpectPacket(PacedSender::kNormalPriority,
ssrc,
@@ -525,7 +503,7 @@ TEST_F(PacedSenderTest, QueueTimeGrowsOverTime) {
uint16_t sequence_number = 1234;
EXPECT_EQ(0, send_bucket_->QueueInMs());
- send_bucket_->UpdateBitrate(30, 0, 0);
+ send_bucket_->UpdateBitrate(kPaceMultiplier * 30, 0);
SendAndExpectPacket(PacedSender::kNormalPriority,
ssrc,
sequence_number,
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/OWNERS b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/OWNERS
index b705ede2a00..eab2b8e0041 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/OWNERS
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/OWNERS
@@ -2,4 +2,8 @@ pwestin@webrtc.org
stefan@webrtc.org
henrik.lundin@webrtc.org
mflodman@webrtc.org
-asapersson@webrtc.org \ No newline at end of file
+asapersson@webrtc.org
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
new file mode 100644
index 00000000000..6b208e4999d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/bwe_simulations.cc
@@ -0,0 +1,162 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+using std::string;
+
+namespace webrtc {
+namespace testing {
+namespace bwe {
+#if BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+BweTestConfig::EstimatorConfig CreateEstimatorConfig(
+ int flow_id, bool plot_delay, bool plot_estimate) {
+ static const AbsoluteSendTimeRemoteBitrateEstimatorFactory factory =
+ AbsoluteSendTimeRemoteBitrateEstimatorFactory();
+
+ return BweTestConfig::EstimatorConfig("AST", flow_id, &factory, kAimdControl,
+ plot_delay, plot_estimate);
+}
+
+BweTestConfig MakeAdaptiveBweTestConfig() {
+ BweTestConfig result;
+ result.estimator_configs.push_back(CreateEstimatorConfig(0, true, true));
+ return result;
+}
+
+BweTestConfig MakeMultiFlowBweTestConfig(int flow_count) {
+ BweTestConfig result;
+ for (int i = 0; i < flow_count; ++i) {
+ result.estimator_configs.push_back(CreateEstimatorConfig(i, false, true));
+ }
+ return result;
+}
+
+// This test fixture is used to instantiate tests running with adaptive video
+// senders.
+class BweSimulation : public BweTest,
+ public ::testing::TestWithParam<BweTestConfig> {
+ public:
+ BweSimulation() : BweTest() {}
+ virtual ~BweSimulation() {}
+
+ virtual void SetUp() {
+ const BweTestConfig& config = GetParam();
+ SetupTestFromConfig(config);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(BweSimulation);
+};
+
+INSTANTIATE_TEST_CASE_P(VideoSendersTest, BweSimulation,
+ ::testing::Values(MakeAdaptiveBweTestConfig()));
+
+TEST_P(BweSimulation, SprintUplinkTest) {
+ VerboseLogging(true);
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ RateCounterFilter counter1(this, "sender_output");
+ TraceBasedDeliveryFilter filter(this, "link_capacity");
+ RateCounterFilter counter2(this, "receiver_input");
+ ASSERT_TRUE(filter.Init(test::ResourcePath("sprint-uplink", "rx")));
+ RunFor(60 * 1000);
+}
+
+TEST_P(BweSimulation, Verizon4gDownlinkTest) {
+ VerboseLogging(true);
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ RateCounterFilter counter1(this, "sender_output");
+ TraceBasedDeliveryFilter filter(this, "link_capacity");
+ RateCounterFilter counter2(this, "receiver_input");
+ ASSERT_TRUE(filter.Init(test::ResourcePath("verizon4g-downlink", "rx")));
+ RunFor(22 * 60 * 1000);
+}
+
+TEST_P(BweSimulation, Choke1000kbps500kbps1000kbps) {
+ VerboseLogging(true);
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ ChokeFilter filter(this);
+ RateCounterFilter counter(this, "receiver_input");
+ filter.SetCapacity(1000);
+ filter.SetMaxDelay(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(1000);
+ RunFor(60 * 1000);
+}
+
+TEST_P(BweSimulation, Choke200kbps30kbps200kbps) {
+ VerboseLogging(true);
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ ChokeFilter filter(this);
+ RateCounterFilter counter(this, "receiver_input");
+ filter.SetCapacity(200);
+ filter.SetMaxDelay(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(30);
+ RunFor(60 * 1000);
+ filter.SetCapacity(200);
+ RunFor(60 * 1000);
+}
+
+TEST_P(BweSimulation, GoogleWifiTrace3Mbps) {
+ VerboseLogging(true);
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ RateCounterFilter counter1(this, "sender_output");
+ TraceBasedDeliveryFilter filter(this, "link_capacity");
+ filter.SetMaxDelay(500);
+ RateCounterFilter counter2(this, "receiver_input");
+ ASSERT_TRUE(filter.Init(test::ResourcePath("google-wifi-3mbps", "rx")));
+ RunFor(300 * 1000);
+}
+
+class MultiFlowBweSimulation : public BweSimulation {
+ public:
+ MultiFlowBweSimulation() : BweSimulation() {}
+ virtual ~MultiFlowBweSimulation() {}
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MultiFlowBweSimulation);
+};
+
+INSTANTIATE_TEST_CASE_P(VideoSendersTest, MultiFlowBweSimulation,
+ ::testing::Values(MakeMultiFlowBweTestConfig(3)));
+
+TEST_P(MultiFlowBweSimulation, SelfFairnessTest) {
+ VerboseLogging(true);
+ const int kAllFlowIds[] = {0, 1, 2};
+ const size_t kNumFlows = sizeof(kAllFlowIds) / sizeof(kAllFlowIds[0]);
+ scoped_ptr<AdaptiveVideoSender> senders[kNumFlows];
+ for (size_t i = 0; i < kNumFlows; ++i) {
+ senders[i].reset(new AdaptiveVideoSender(kAllFlowIds[i], this, 30, 300, 0,
+ 0));
+ }
+ // Second and third flow.
+ ChokeFilter choke(this, CreateFlowIds(&kAllFlowIds[1], 2));
+ choke.SetCapacity(1500);
+ // First flow.
+ ChokeFilter choke2(this, CreateFlowIds(&kAllFlowIds[0], 1));
+ choke2.SetCapacity(1000);
+
+ scoped_ptr<RateCounterFilter> rate_counters[kNumFlows];
+ for (size_t i = 0; i < kNumFlows; ++i) {
+ rate_counters[i].reset(new RateCounterFilter(
+ this, CreateFlowIds(&kAllFlowIds[i], 1), "receiver_input"));
+ }
+ RunFor(30 * 60 * 1000);
+}
+#endif // BWE_TEST_LOGGING_COMPILE_TIME_ENABLE
+} // namespace bwe
+} // namespace testing
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
index 170ec765c1e..e61e903558f 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
@@ -25,6 +25,11 @@ namespace webrtc {
class Clock;
+enum RateControlType {
+ kMimdControl,
+ kAimdControl
+};
+
// RemoteBitrateObserver is used to signal changes in bitrate estimates for
// the incoming streams.
class RemoteBitrateObserver {
@@ -37,6 +42,27 @@ class RemoteBitrateObserver {
virtual ~RemoteBitrateObserver() {}
};
+struct ReceiveBandwidthEstimatorStats {
+ ReceiveBandwidthEstimatorStats() : total_propagation_time_delta_ms(0) {}
+
+ // The "propagation_time_delta" of a frame is defined as (d_arrival - d_sent),
+ // where d_arrival is the delta of the arrival times of the frame and the
+ // previous frame, d_sent is the delta of the sent times of the frame and
+ // the previous frame. The sent time is calculated from the RTP timestamp.
+
+ // |total_propagation_time_delta_ms| is the sum of the propagation_time_deltas
+ // of all received frames, except that it's is adjusted to 0 when it becomes
+ // negative.
+ int total_propagation_time_delta_ms;
+ // The propagation_time_deltas for the frames arrived in the last
+ // kProcessIntervalMs using the clock passed to
+ // RemoteBitrateEstimatorFactory::Create.
+ std::vector<int> recent_propagation_time_delta_ms;
+ // The arrival times for the frames arrived in the last kProcessIntervalMs
+ // using the clock passed to RemoteBitrateEstimatorFactory::Create.
+ std::vector<int64_t> recent_arrival_time_ms;
+};
+
class RemoteBitrateEstimator : public CallStatsObserver, public Module {
public:
virtual ~RemoteBitrateEstimator() {}
@@ -45,6 +71,7 @@ class RemoteBitrateEstimator : public CallStatsObserver, public Module {
// estimate and the over-use detector. If an over-use is detected the
// remote bitrate estimate will be updated. Note that |payload_size| is the
// packet size excluding headers.
+ // Note that |arrival_time_ms| can be of an arbitrary time base.
virtual void IncomingPacket(int64_t arrival_time_ms,
int payload_size,
const RTPHeader& header) = 0;
@@ -58,6 +85,9 @@ class RemoteBitrateEstimator : public CallStatsObserver, public Module {
virtual bool LatestEstimate(std::vector<unsigned int>* ssrcs,
unsigned int* bitrate_bps) const = 0;
+ // Returns true if the statistics are available.
+ virtual bool GetStats(ReceiveBandwidthEstimatorStats* output) const = 0;
+
protected:
static const int kProcessIntervalMs = 1000;
static const int kStreamTimeOutMs = 2000;
@@ -70,6 +100,7 @@ struct RemoteBitrateEstimatorFactory {
virtual RemoteBitrateEstimator* Create(
RemoteBitrateObserver* observer,
Clock* clock,
+ RateControlType control_type,
uint32_t min_bitrate_bps) const;
};
@@ -81,6 +112,7 @@ struct AbsoluteSendTimeRemoteBitrateEstimatorFactory
virtual RemoteBitrateEstimator* Create(
RemoteBitrateObserver* observer,
Clock* clock,
+ RateControlType control_type,
uint32_t min_bitrate_bps) const;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h
deleted file mode 100644
index 7928abfacbc..00000000000
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_RTP_TO_NTP_H_
-#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_RTP_TO_NTP_H_
-
-#include <list>
-
-#include "webrtc/typedefs.h"
-
-namespace webrtc {
-
-namespace synchronization {
-
-struct RtcpMeasurement {
- RtcpMeasurement();
- RtcpMeasurement(uint32_t ntp_secs, uint32_t ntp_frac, uint32_t timestamp);
- uint32_t ntp_secs;
- uint32_t ntp_frac;
- uint32_t rtp_timestamp;
-};
-
-typedef std::list<RtcpMeasurement> RtcpList;
-
-// Converts an RTP timestamp to the NTP domain in milliseconds using two
-// (RTP timestamp, NTP timestamp) pairs.
-bool RtpToNtpMs(int64_t rtp_timestamp, const RtcpList& rtcp,
- int64_t* timestamp_in_ms);
-
-// Returns 1 there has been a forward wrap around, 0 if there has been no wrap
-// around and -1 if there has been a backwards wrap around (i.e. reordering).
-int CheckForWrapArounds(uint32_t rtp_timestamp, uint32_t rtcp_rtp_timestamp);
-} // namespace synchronization
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_RTP_TO_NTP_H_
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc
index 86f6cb8ee3c..9baaa9c9134 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.cc
@@ -10,19 +10,12 @@
#include <math.h>
#include <stdlib.h> // fabsf
-#if _WIN32
-#include <windows.h>
-#endif
#include "webrtc/modules/remote_bitrate_estimator/overuse_detector.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_rate_control.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/trace.h"
-#ifdef WEBRTC_BWE_MATLAB
-extern MatlabEngine eng; // global variable defined elsewhere
-#endif
-
enum { kOverUsingTimeThreshold = 100 };
enum { kMinFramePeriodHistoryLength = 60 };
@@ -43,74 +36,20 @@ OveruseDetector::OveruseDetector(const OverUseDetectorOptions& options)
prev_offset_(0.0),
time_over_using_(-1),
over_use_counter_(0),
- hypothesis_(kBwNormal),
- time_of_last_received_packet_(-1)
-#ifdef WEBRTC_BWE_MATLAB
- , plots_()
-#endif
- {
+ hypothesis_(kBwNormal) {
memcpy(E_, options_.initial_e, sizeof(E_));
memcpy(process_noise_, options_.initial_process_noise,
sizeof(process_noise_));
}
OveruseDetector::~OveruseDetector() {
-#ifdef WEBRTC_BWE_MATLAB
- if (plots_.plot1_) {
- eng.DeletePlot(plots_.plot1_);
- plots_.plot1_ = NULL;
- }
- if (plots_.plot2_) {
- eng.DeletePlot(plots_.plot2_);
- plots_.plot2_ = NULL;
- }
- if (plots_.plot3_) {
- eng.DeletePlot(plots_.plot3_);
- plots_.plot3_ = NULL;
- }
- if (plots_.plot4_) {
- eng.DeletePlot(plots_.plot4_);
- plots_.plot4_ = NULL;
- }
-#endif
-
ts_delta_hist_.clear();
}
void OveruseDetector::Update(uint16_t packet_size,
int64_t timestamp_ms,
uint32_t timestamp,
- const int64_t now_ms) {
- time_of_last_received_packet_ = now_ms;
-#ifdef WEBRTC_BWE_MATLAB
- // Create plots
- const int64_t startTimeMs = nowMS;
- if (plots_.plot1_ == NULL) {
- plots_.plot1_ = eng.NewPlot(new MatlabPlot());
- plots_.plot1_->AddLine(1000, "b.", "scatter");
- }
- if (plots_.plot2_ == NULL) {
- plots_.plot2_ = eng.NewPlot(new MatlabPlot());
- plots_.plot2_->AddTimeLine(30, "b", "offset", startTimeMs);
- plots_.plot2_->AddTimeLine(30, "r--", "limitPos", startTimeMs);
- plots_.plot2_->AddTimeLine(30, "k.", "trigger", startTimeMs);
- plots_.plot2_->AddTimeLine(30, "ko", "detection", startTimeMs);
- // plots_.plot2_->AddTimeLine(30, "g", "slowMean", startTimeMs);
- }
- if (plots_.plot3_ == NULL) {
- plots_.plot3_ = eng.NewPlot(new MatlabPlot());
- plots_.plot3_->AddTimeLine(30, "b", "noiseVar", startTimeMs);
- }
- if (plots_.plot4_ == NULL) {
- plots_.plot4_ = eng.NewPlot(new MatlabPlot());
- // plots_.plot4_->AddTimeLine(60, "b", "p11", startTimeMs);
- // plots_.plot4_->AddTimeLine(60, "r", "p12", startTimeMs);
- plots_.plot4_->AddTimeLine(60, "g", "p22", startTimeMs);
- // plots_.plot4_->AddTimeLine(60, "g--", "p22_hat", startTimeMs);
- // plots_.plot4_->AddTimeLine(30, "b.-", "deltaFs", startTimeMs);
- }
-
-#endif
+ const int64_t arrival_time_ms) {
bool new_timestamp = (timestamp != current_frame_.timestamp);
if (timestamp_ms >= 0) {
if (prev_frame_.timestamp_ms == -1 && current_frame_.timestamp_ms == -1) {
@@ -127,8 +66,6 @@ void OveruseDetector::Update(uint16_t packet_size,
return;
} else if (new_timestamp) {
// First packet of a later frame, the previous frame sample is ready.
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "Frame complete at %I64i",
- current_frame_.complete_time_ms);
if (prev_frame_.complete_time_ms >= 0) { // This is our second frame.
int64_t t_delta = 0;
double ts_delta = 0;
@@ -143,7 +80,7 @@ void OveruseDetector::Update(uint16_t packet_size,
}
// Accumulate the frame size
current_frame_.size += packet_size;
- current_frame_.complete_time_ms = now_ms;
+ current_frame_.complete_time_ms = arrival_time_ms;
}
BandwidthUsage OveruseDetector::State() const {
@@ -168,10 +105,6 @@ void OveruseDetector::SetRateControlRegion(RateControlRegion region) {
}
}
-int64_t OveruseDetector::time_of_last_received_packet() const {
- return time_of_last_received_packet_;
-}
-
void OveruseDetector::SwitchTimeBase() {
current_frame_.size = 0;
current_frame_.complete_time_ms = -1;
@@ -249,10 +182,10 @@ void OveruseDetector::UpdateKalman(int64_t t_delta,
const double residual = t_ts_delta - slope_*h[0] - offset_;
const bool stable_state =
- (BWE_MIN(num_of_deltas_, 60) * fabsf(offset_) < threshold_);
+ (BWE_MIN(num_of_deltas_, 60) * fabs(offset_) < threshold_);
// We try to filter out very late frames. For instance periodic key
// frames doesn't fit the Gaussian model well.
- if (fabsf(residual) < 3 * sqrt(var_noise_)) {
+ if (fabs(residual) < 3 * sqrt(var_noise_)) {
UpdateNoiseEstimate(residual, min_frame_period, stable_state);
} else {
UpdateNoiseEstimate(3 * sqrt(var_noise_), min_frame_period, stable_state);
@@ -279,39 +212,11 @@ void OveruseDetector::UpdateKalman(int64_t t_delta,
E_[0][0] * E_[1][1] - E_[0][1] * E_[1][0] >= 0 &&
E_[0][0] >= 0);
-#ifdef WEBRTC_BWE_MATLAB
- // plots_.plot4_->Append("p11",E_[0][0]);
- // plots_.plot4_->Append("p12",E_[0][1]);
- plots_.plot4_->Append("p22", E_[1][1]);
- // plots_.plot4_->Append("p22_hat", 0.5*(process_noise_[1] +
- // sqrt(process_noise_[1]*(process_noise_[1] + 4*var_noise_))));
- // plots_.plot4_->Append("deltaFs", fsDelta);
- plots_.plot4_->Plot();
-#endif
slope_ = slope_ + K[0] * residual;
prev_offset_ = offset_;
offset_ = offset_ + K[1] * residual;
Detect(ts_delta);
-
-#ifdef WEBRTC_BWE_MATLAB
- plots_.plot1_->Append("scatter",
- static_cast<double>(current_frame_.size) - prev_frame_.size,
- static_cast<double>(t_delta - ts_delta));
- plots_.plot1_->MakeTrend("scatter", "slope", slope_, offset_, "k-");
- plots_.plot1_->MakeTrend("scatter", "thresholdPos",
- slope_, offset_ + 2 * sqrt(var_noise_), "r-");
- plots_.plot1_->MakeTrend("scatter", "thresholdNeg",
- slope_, offset_ - 2 * sqrt(var_noise_), "r-");
- plots_.plot1_->Plot();
-
- plots_.plot2_->Append("offset", offset_);
- plots_.plot2_->Append("limitPos", threshold_/BWE_MIN(num_of_deltas_, 60));
- plots_.plot2_->Plot();
-
- plots_.plot3_->Append("noiseVar", var_noise_);
- plots_.plot3_->Plot();
-#endif
}
double OveruseDetector::UpdateMinFramePeriod(double ts_delta) {
@@ -358,7 +263,7 @@ BandwidthUsage OveruseDetector::Detect(double ts_delta) {
return kBwNormal;
}
const double T = BWE_MIN(num_of_deltas_, 60) * offset_;
- if (fabsf(T) > threshold_) {
+ if (fabs(T) > threshold_) {
if (offset_ > 0) {
if (time_over_using_ == -1) {
// Initialize the timer. Assume that we've been
@@ -373,38 +278,17 @@ BandwidthUsage OveruseDetector::Detect(double ts_delta) {
if (time_over_using_ > kOverUsingTimeThreshold
&& over_use_counter_ > 1) {
if (offset_ >= prev_offset_) {
-#ifdef _DEBUG
- if (hypothesis_ != kBwOverusing) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwOverusing");
- }
-#endif
time_over_using_ = 0;
over_use_counter_ = 0;
hypothesis_ = kBwOverusing;
-#ifdef WEBRTC_BWE_MATLAB
- plots_.plot2_->Append("detection", offset_); // plot it later
-#endif
}
}
-#ifdef WEBRTC_BWE_MATLAB
- plots_.plot2_->Append("trigger", offset_); // plot it later
-#endif
} else {
-#ifdef _DEBUG
- if (hypothesis_ != kBwUnderusing) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwUnderUsing");
- }
-#endif
time_over_using_ = -1;
over_use_counter_ = 0;
hypothesis_ = kBwUnderusing;
}
} else {
-#ifdef _DEBUG
- if (hypothesis_ != kBwNormal) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwNormal");
- }
-#endif
time_over_using_ = -1;
over_use_counter_ = 0;
hypothesis_ = kBwNormal;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.h
index 57f4ddf04f7..9c565e45f18 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/overuse_detector.h
@@ -16,10 +16,6 @@
#include "webrtc/modules/remote_bitrate_estimator/include/bwe_defines.h"
#include "webrtc/typedefs.h"
-#ifdef WEBRTC_BWE_MATLAB
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h"
-#endif
-
namespace webrtc {
enum RateControlRegion;
@@ -32,11 +28,10 @@ class OveruseDetector {
void Update(uint16_t packet_size,
int64_t timestamp_ms,
uint32_t rtp_timestamp,
- int64_t now_ms);
+ int64_t arrival_time_ms);
BandwidthUsage State() const;
double NoiseVar() const;
void SetRateControlRegion(RateControlRegion region);
- int64_t time_of_last_received_packet() const;
private:
struct FrameSample {
@@ -52,16 +47,6 @@ class OveruseDetector {
int64_t timestamp_ms;
};
- struct DebugPlots {
-#ifdef WEBRTC_BWE_MATLAB
- DebugPlots() : plot1(NULL), plot2(NULL), plot3(NULL), plot4(NULL) {}
- MatlabPlot* plot1;
- MatlabPlot* plot2;
- MatlabPlot* plot3;
- MatlabPlot* plot4;
-#endif
- };
-
// Returns true if |timestamp| represent a time which is later than
// |prev_timestamp|.
static bool InOrderTimestamp(uint32_t timestamp, uint32_t prev_timestamp);
@@ -103,10 +88,6 @@ class OveruseDetector {
double time_over_using_;
uint16_t over_use_counter_;
BandwidthUsage hypothesis_;
- int64_t time_of_last_received_packet_;
-#ifdef WEBRTC_BWE_MATLAB
- DebugPlots plots_;
-#endif
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc
index 4a9b4488108..48485ffb551 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.cc
@@ -10,6 +10,8 @@
#include "webrtc/modules/remote_bitrate_estimator/rate_statistics.h"
+#include <assert.h>
+
namespace webrtc {
RateStatistics::RateStatistics(uint32_t window_size_ms, float scale)
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.h
index 429669059a2..f97371bd621 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rate_statistics.h
@@ -34,7 +34,7 @@ class RateStatistics {
// Counters are kept in buckets (circular buffer), with one bucket
// per millisecond.
const int num_buckets_;
- scoped_array<uint32_t> buckets_;
+ scoped_ptr<uint32_t[]> buckets_;
// Total count recorded in buckets.
uint32_t accumulated_count_;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
index bbd353fcdd4..c2f1b3da475 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
@@ -21,10 +21,66 @@
'sources': [
'include/bwe_defines.h',
'include/remote_bitrate_estimator.h',
- 'include/rtp_to_ntp.h',
'rate_statistics.cc',
'rate_statistics.h',
- 'rtp_to_ntp.cc',
+ ], # source
+ },
+ {
+ 'target_name': 'bwe_tools_util',
+ 'type': 'static_library',
+ 'dependencies': [
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ 'rtp_rtcp',
+ ],
+ 'sources': [
+ 'tools/bwe_rtp.cc',
+ 'tools/bwe_rtp.h',
+ ],
+ },
+ {
+ 'target_name': 'bwe_rtp_to_text',
+ 'type': 'executable',
+ 'includes': [
+ '../rtp_rtcp/source/rtp_rtcp.gypi',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
+ 'bwe_tools_util',
+ 'rtp_rtcp',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'include',
+ ],
+ },
+ 'sources': [
+ 'tools/rtp_to_text.cc',
+ '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.cc',
+ '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.h',
+ ], # source
+ },
+ {
+ 'target_name': 'bwe_rtp_play',
+ 'type': 'executable',
+ 'includes': [
+ '../rtp_rtcp/source/rtp_rtcp.gypi',
+ ],
+ 'dependencies': [
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
+ 'bwe_tools_util',
+ 'rtp_rtcp',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'include',
+ ],
+ },
+ 'sources': [
+ 'tools/bwe_rtp_play.cc',
+ '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.cc',
+ '<(webrtc_root)/modules/video_coding/main/test/rtp_file_reader.h',
], # source
},
], # targets
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
index a544ee5d034..08422d28b23 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc
@@ -16,8 +16,8 @@
#include "webrtc/modules/remote_bitrate_estimator/remote_rate_control.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -55,11 +55,31 @@ class RemoteBitrateEstimatorSingleStream : public RemoteBitrateEstimator {
virtual bool LatestEstimate(std::vector<unsigned int>* ssrcs,
unsigned int* bitrate_bps) const OVERRIDE;
+ virtual bool GetStats(
+ ReceiveBandwidthEstimatorStats* output) const OVERRIDE;
+
private:
- typedef std::map<unsigned int, OveruseDetector> SsrcOveruseDetectorMap;
+ // Map from SSRC to over-use detector and last incoming packet time in
+ // milliseconds, taken from clock_.
+ typedef std::map<unsigned int, std::pair<OveruseDetector, int64_t> >
+ SsrcOveruseDetectorMap;
+
+ static OveruseDetector* GetDetector(
+ const SsrcOveruseDetectorMap::iterator it) {
+ return &it->second.first;
+ }
+
+ static int64_t GetPacketTimeMs(const SsrcOveruseDetectorMap::iterator it) {
+ return it->second.second;
+ }
+
+ static void SetPacketTimeMs(SsrcOveruseDetectorMap::iterator it,
+ int64_t time_ms) {
+ it->second.second = time_ms;
+ }
// Triggers a new estimate calculation.
- void UpdateEstimate(int64_t time_now);
+ void UpdateEstimate(int64_t now_ms);
void GetSsrcs(std::vector<unsigned int>* ssrcs) const;
@@ -92,6 +112,7 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket(
uint32_t ssrc = header.ssrc;
uint32_t rtp_timestamp = header.timestamp +
header.extension.transmissionTimeOffset;
+ int64_t now_ms = clock_->TimeInMilliseconds();
CriticalSectionScoped cs(crit_sect_.get());
SsrcOveruseDetectorMap::iterator it = overuse_detectors_.find(ssrc);
if (it == overuse_detectors_.end()) {
@@ -102,22 +123,23 @@ void RemoteBitrateEstimatorSingleStream::IncomingPacket(
// automatically cleaned up when we have one RemoteBitrateEstimator per REMB
// group.
std::pair<SsrcOveruseDetectorMap::iterator, bool> insert_result =
- overuse_detectors_.insert(std::make_pair(ssrc, OveruseDetector(
- OverUseDetectorOptions())));
+ overuse_detectors_.insert(std::make_pair(ssrc,
+ std::make_pair(OveruseDetector(OverUseDetectorOptions()), now_ms)));
it = insert_result.first;
}
- OveruseDetector* overuse_detector = &it->second;
- incoming_bitrate_.Update(payload_size, arrival_time_ms);
+ SetPacketTimeMs(it, now_ms);
+ OveruseDetector* overuse_detector = GetDetector(it);
+ incoming_bitrate_.Update(payload_size, now_ms);
const BandwidthUsage prior_state = overuse_detector->State();
overuse_detector->Update(payload_size, -1, rtp_timestamp, arrival_time_ms);
if (overuse_detector->State() == kBwOverusing) {
- unsigned int incoming_bitrate = incoming_bitrate_.Rate(arrival_time_ms);
+ unsigned int incoming_bitrate = incoming_bitrate_.Rate(now_ms);
if (prior_state != kBwOverusing ||
- remote_rate_.TimeToReduceFurther(arrival_time_ms, incoming_bitrate)) {
+ remote_rate_.TimeToReduceFurther(now_ms, incoming_bitrate)) {
// The first overuse should immediately trigger a new estimate.
// We also have to update the estimate immediately if we are overusing
// and the target bitrate is too high compared to what we are receiving.
- UpdateEstimate(arrival_time_ms);
+ UpdateEstimate(now_ms);
}
}
}
@@ -126,8 +148,9 @@ int32_t RemoteBitrateEstimatorSingleStream::Process() {
if (TimeUntilNextProcess() > 0) {
return 0;
}
- UpdateEstimate(clock_->TimeInMilliseconds());
- last_process_time_ = clock_->TimeInMilliseconds();
+ int64_t now_ms = clock_->TimeInMilliseconds();
+ UpdateEstimate(now_ms);
+ last_process_time_ = now_ms;
return 0;
}
@@ -138,25 +161,24 @@ int32_t RemoteBitrateEstimatorSingleStream::TimeUntilNextProcess() {
return last_process_time_ + kProcessIntervalMs - clock_->TimeInMilliseconds();
}
-void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t time_now) {
+void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t now_ms) {
CriticalSectionScoped cs(crit_sect_.get());
BandwidthUsage bw_state = kBwNormal;
double sum_noise_var = 0.0;
SsrcOveruseDetectorMap::iterator it = overuse_detectors_.begin();
while (it != overuse_detectors_.end()) {
- const int64_t time_of_last_received_packet =
- it->second.time_of_last_received_packet();
- if (time_of_last_received_packet >= 0 &&
- time_now - time_of_last_received_packet > kStreamTimeOutMs) {
+ if (GetPacketTimeMs(it) >= 0 &&
+ now_ms - GetPacketTimeMs(it) > kStreamTimeOutMs) {
// This over-use detector hasn't received packets for |kStreamTimeOutMs|
// milliseconds and is considered stale.
overuse_detectors_.erase(it++);
} else {
- sum_noise_var += it->second.NoiseVar();
+ OveruseDetector* overuse_detector = GetDetector(it);
+ sum_noise_var += overuse_detector->NoiseVar();
// Make sure that we trigger an over-use if any of the over-use detectors
// is detecting over-use.
- if (it->second.State() > bw_state) {
- bw_state = it->second.State();
+ if (overuse_detector->State() > bw_state) {
+ bw_state = overuse_detector->State();
}
++it;
}
@@ -169,17 +191,17 @@ void RemoteBitrateEstimatorSingleStream::UpdateEstimate(int64_t time_now) {
double mean_noise_var = sum_noise_var /
static_cast<double>(overuse_detectors_.size());
const RateControlInput input(bw_state,
- incoming_bitrate_.Rate(time_now),
+ incoming_bitrate_.Rate(now_ms),
mean_noise_var);
- const RateControlRegion region = remote_rate_.Update(&input, time_now);
- unsigned int target_bitrate = remote_rate_.UpdateBandwidthEstimate(time_now);
+ const RateControlRegion region = remote_rate_.Update(&input, now_ms);
+ unsigned int target_bitrate = remote_rate_.UpdateBandwidthEstimate(now_ms);
if (remote_rate_.ValidEstimate()) {
std::vector<unsigned int> ssrcs;
GetSsrcs(&ssrcs);
observer_->OnReceiveBitrateChanged(ssrcs, target_bitrate);
}
for (it = overuse_detectors_.begin(); it != overuse_detectors_.end(); ++it) {
- it->second.SetRateControlRegion(region);
+ GetDetector(it)->SetRateControlRegion(region);
}
}
@@ -210,6 +232,12 @@ bool RemoteBitrateEstimatorSingleStream::LatestEstimate(
return true;
}
+bool RemoteBitrateEstimatorSingleStream::GetStats(
+ ReceiveBandwidthEstimatorStats* output) const {
+ // Not implemented.
+ return false;
+}
+
void RemoteBitrateEstimatorSingleStream::GetSsrcs(
std::vector<unsigned int>* ssrcs) const {
assert(ssrcs);
@@ -225,9 +253,9 @@ void RemoteBitrateEstimatorSingleStream::GetSsrcs(
RemoteBitrateEstimator* RemoteBitrateEstimatorFactory::Create(
RemoteBitrateObserver* observer,
Clock* clock,
+ RateControlType control_type,
uint32_t min_bitrate_bps) const {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRemoteBitrateEstimator, -1,
- "RemoteBitrateEstimatorFactory: Instantiating.");
+ LOG(LS_INFO) << "RemoteBitrateEstimatorFactory: Instantiating.";
return new RemoteBitrateEstimatorSingleStream(observer, clock,
min_bitrate_bps);
}
@@ -235,9 +263,10 @@ RemoteBitrateEstimator* RemoteBitrateEstimatorFactory::Create(
RemoteBitrateEstimator* AbsoluteSendTimeRemoteBitrateEstimatorFactory::Create(
RemoteBitrateObserver* observer,
Clock* clock,
+ RateControlType control_type,
uint32_t min_bitrate_bps) const {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRemoteBitrateEstimator, -1,
- "AbsoluteSendTimeRemoteBitrateEstimatorFactory: Instantiating.");
+ LOG(LS_INFO) << "AbsoluteSendTimeRemoteBitrateEstimatorFactory: "
+ "Instantiating.";
return new RemoteBitrateEstimatorSingleStream(observer, clock,
min_bitrate_bps);
}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
index a3e44d87ef5..f67c7f34fc0 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream_unittest.cc
@@ -10,8 +10,8 @@
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
namespace webrtc {
@@ -24,6 +24,7 @@ class RemoteBitrateEstimatorSingleTest : public RemoteBitrateEstimatorTest {
bitrate_estimator_.reset(RemoteBitrateEstimatorFactory().Create(
bitrate_observer_.get(),
&clock_,
+ kMimdControl,
kRemoteBitrateEstimatorMinBitrateBps));
}
protected:
@@ -35,7 +36,7 @@ TEST_F(RemoteBitrateEstimatorSingleTest, InitialBehavior) {
}
TEST_F(RemoteBitrateEstimatorSingleTest, RateIncreaseReordering) {
- RateIncreaseReorderingTestHelper();
+ RateIncreaseReorderingTestHelper(498136);
}
TEST_F(RemoteBitrateEstimatorSingleTest, RateIncreaseRtpTimestamps) {
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
index 88ffe061b99..1b38a1ea306 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.cc
@@ -226,7 +226,8 @@ void RemoteBitrateEstimatorTest::IncomingPacket(uint32_t ssrc,
header.ssrc = ssrc;
header.timestamp = rtp_timestamp;
header.extension.absoluteSendTime = absolute_send_time;
- bitrate_estimator_->IncomingPacket(arrival_time, payload_size, header);
+ bitrate_estimator_->IncomingPacket(arrival_time + kArrivalTimeClockOffsetMs,
+ payload_size, header);
}
// Generates a frame of packets belonging to a stream at a given bitrate and
@@ -245,6 +246,10 @@ bool RemoteBitrateEstimatorTest::GenerateAndProcessFrame(unsigned int ssrc,
while (!packets.empty()) {
testing::RtpStream::RtpPacket* packet = packets.front();
bitrate_observer_->Reset();
+ // The simulated clock should match the time of packet->arrival_time
+ // since both are used in IncomingPacket().
+ clock_.AdvanceTimeMicroseconds(packet->arrival_time -
+ clock_.TimeInMicroseconds());
IncomingPacket(packet->ssrc,
packet->size,
(packet->arrival_time + 500) / 1000,
@@ -256,8 +261,6 @@ bool RemoteBitrateEstimatorTest::GenerateAndProcessFrame(unsigned int ssrc,
overuse = true;
EXPECT_LE(bitrate_observer_->latest_bitrate(), bitrate_bps);
}
- clock_.AdvanceTimeMicroseconds(packet->arrival_time -
- clock_.TimeInMicroseconds());
delete packet;
packets.pop_front();
}
@@ -341,9 +344,14 @@ void RemoteBitrateEstimatorTest::InitialBehaviorTestHelper(
EXPECT_TRUE(bitrate_observer_->updated());
bitrate_observer_->Reset();
EXPECT_EQ(bitrate_observer_->latest_bitrate(), bitrate_bps);
+ bitrate_estimator_->RemoveStream(kDefaultSsrc);
+ EXPECT_TRUE(bitrate_estimator_->LatestEstimate(&ssrcs, &bitrate_bps));
+ ASSERT_EQ(0u, ssrcs.size());
+ EXPECT_EQ(0u, bitrate_bps);
}
-void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper() {
+void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper(
+ uint32_t expected_bitrate_bps) {
const int kFramerate = 50; // 50 fps to avoid rounding errors.
const int kFrameIntervalMs = 1000 / kFramerate;
const uint32_t kFrameIntervalAbsSendTime = AbsSendTime(1, kFramerate);
@@ -364,7 +372,7 @@ void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper() {
}
bitrate_estimator_->Process();
EXPECT_TRUE(bitrate_observer_->updated());
- EXPECT_EQ(498136u, bitrate_observer_->latest_bitrate());
+ EXPECT_EQ(expected_bitrate_bps, bitrate_observer_->latest_bitrate());
for (int i = 0; i < 10; ++i) {
clock_.AdvanceTimeMilliseconds(2 * kFrameIntervalMs);
timestamp += 2 * 90 * kFrameIntervalMs;
@@ -379,7 +387,7 @@ void RemoteBitrateEstimatorTest::RateIncreaseReorderingTestHelper() {
}
bitrate_estimator_->Process();
EXPECT_TRUE(bitrate_observer_->updated());
- EXPECT_EQ(498136u, bitrate_observer_->latest_bitrate());
+ EXPECT_EQ(expected_bitrate_bps, bitrate_observer_->latest_bitrate());
}
// Make sure we initially increase the bitrate as expected.
@@ -486,5 +494,21 @@ void RemoteBitrateEstimatorTest::CapacityDropTestHelper(
EXPECT_EQ(expected_bitrate_drop_delta,
bitrate_drop_time - overuse_start_time);
+
+ // Remove stream one by one.
+ unsigned int latest_bps = 0;
+ std::vector<unsigned int> ssrcs;
+ for (int i = 0; i < number_of_streams; i++) {
+ EXPECT_TRUE(bitrate_estimator_->LatestEstimate(&ssrcs, &latest_bps));
+ EXPECT_EQ(number_of_streams - i, static_cast<int>(ssrcs.size()));
+ EXPECT_EQ(bitrate_bps, latest_bps);
+ for (int j = i; j < number_of_streams; j++) {
+ EXPECT_EQ(kDefaultSsrc + j, ssrcs[j - i]);
+ }
+ bitrate_estimator_->RemoveStream(kDefaultSsrc + i);
+ }
+ EXPECT_TRUE(bitrate_estimator_->LatestEstimate(&ssrcs, &latest_bps));
+ EXPECT_EQ(0u, ssrcs.size());
+ EXPECT_EQ(0u, latest_bps);
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
index 64830dab70f..1d748c57b9f 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest_helper.h
@@ -16,9 +16,9 @@
#include <utility>
#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
@@ -190,7 +190,7 @@ class RemoteBitrateEstimatorTest : public ::testing::Test {
unsigned int target_bitrate);
void InitialBehaviorTestHelper(unsigned int expected_converge_bitrate);
- void RateIncreaseReorderingTestHelper();
+ void RateIncreaseReorderingTestHelper(unsigned int expected_bitrate);
void RateIncreaseRtpTimestampsTestHelper();
void CapacityDropTestHelper(int number_of_streams,
bool wrap_time_stamp,
@@ -198,6 +198,7 @@ class RemoteBitrateEstimatorTest : public ::testing::Test {
unsigned int expected_bitrate_drop_delta);
static const unsigned int kDefaultSsrc;
+ static const int kArrivalTimeClockOffsetMs = 60000;
SimulatedClock clock_; // Time at the receiver.
scoped_ptr<testing::TestBitrateObserver> bitrate_observer_;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
index ed8e5c555ee..67b60848148 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimators_test.cc
@@ -8,90 +8,95 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <sstream>
+
#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "webrtc/modules/remote_bitrate_estimator/test/bwe_test.h"
+#include "webrtc/test/testsupport/fileutils.h"
+#include "webrtc/test/testsupport/perf_test.h"
+
+using std::string;
namespace webrtc {
namespace testing {
namespace bwe {
+enum Estimator { kAbsSendTime, kTransmissionOffset };
-std::vector<const PacketSenderFactory*> VideoSenderFactories(uint32_t count) {
- class VideoPacketSenderFactory : public PacketSenderFactory {
- public:
- VideoPacketSenderFactory(float fps, uint32_t kbps, uint32_t ssrc,
- float frame_offset)
- : fps_(fps),
- kbps_(kbps),
- ssrc_(ssrc),
- frame_offset_(frame_offset) {
- }
- virtual ~VideoPacketSenderFactory() {}
- virtual PacketSender* Create() const {
- return new VideoSender(NULL, fps_, kbps_, ssrc_, frame_offset_);
- }
- private:
- float fps_;
- uint32_t kbps_;
- uint32_t ssrc_;
- float frame_offset_;
- };
-
- static const VideoPacketSenderFactory factories[] = {
- VideoPacketSenderFactory(30.00f, 150, 0x1234, 0.13f),
- VideoPacketSenderFactory(15.00f, 500, 0x2345, 0.16f),
- VideoPacketSenderFactory(30.00f, 1200, 0x3456, 0.26f),
- VideoPacketSenderFactory(7.49f, 150, 0x4567, 0.05f),
- VideoPacketSenderFactory(7.50f, 150, 0x5678, 0.15f),
- VideoPacketSenderFactory(7.51f, 150, 0x6789, 0.25f),
- VideoPacketSenderFactory(15.02f, 150, 0x7890, 0.27f),
- VideoPacketSenderFactory(15.03f, 150, 0x8901, 0.38f),
- VideoPacketSenderFactory(30.02f, 150, 0x9012, 0.39f),
- VideoPacketSenderFactory(30.03f, 150, 0x0123, 0.52f)
- };
- assert(count <= sizeof(factories) / sizeof(factories[0]));
-
- std::vector<const PacketSenderFactory*> result;
- for (uint32_t i = 0; i < count; ++i) {
- result.push_back(&factories[i]);
- }
- return result;
-}
-
-std::vector<BweTestConfig::EstimatorConfig> EstimatorConfigs() {
+BweTestConfig::EstimatorConfig EstimatorConfigs(Estimator estimator,
+ int flow_id) {
static const RemoteBitrateEstimatorFactory factories[] = {
RemoteBitrateEstimatorFactory(),
AbsoluteSendTimeRemoteBitrateEstimatorFactory()
};
-
- std::vector<BweTestConfig::EstimatorConfig> result;
- result.push_back(BweTestConfig::EstimatorConfig("TOF", &factories[0]));
- result.push_back(BweTestConfig::EstimatorConfig("AST", &factories[1]));
- return result;
+ switch (estimator) {
+ case kTransmissionOffset:
+ return BweTestConfig::EstimatorConfig("TOF", flow_id, &factories[0],
+ kMimdControl, false, false);
+ case kAbsSendTime:
+ return BweTestConfig::EstimatorConfig("AST", flow_id, &factories[1],
+ kMimdControl, false, false);
+ }
+ assert(false);
+ return BweTestConfig::EstimatorConfig();
}
-BweTestConfig MakeBweTestConfig(uint32_t sender_count) {
- BweTestConfig result = {
- VideoSenderFactories(sender_count), EstimatorConfigs()
- };
+struct DefaultBweTestConfig {
+ BweTestConfig bwe_test_config;
+ size_t number_of_senders;
+};
+
+DefaultBweTestConfig MakeBweTestConfig(uint32_t sender_count,
+ Estimator estimator) {
+ DefaultBweTestConfig result;
+ result.bwe_test_config.estimator_configs.push_back(
+ EstimatorConfigs(estimator, 0));
+ result.number_of_senders = sender_count;
return result;
}
-INSTANTIATE_TEST_CASE_P(VideoSendersTest, BweTest,
- ::testing::Values(MakeBweTestConfig(1),
- MakeBweTestConfig(3)));
+class DefaultBweTest : public BweTest,
+ public ::testing::TestWithParam<DefaultBweTestConfig> {
+ public:
+ DefaultBweTest() : packet_senders_() {}
+ virtual ~DefaultBweTest() {}
+
+ virtual void SetUp() {
+ const DefaultBweTestConfig& config = GetParam();
+ SetupTestFromConfig(config.bwe_test_config);
+ for (size_t i = 0; i < config.number_of_senders; ++i) {
+ packet_senders_.push_back(new VideoSender(0, this, 30, 300, 0, 0));
+ }
+ }
+
+ virtual void TearDown() {
+ while (!packet_senders_.empty()) {
+ delete packet_senders_.front();
+ packet_senders_.pop_front();
+ }
+ }
+
+ protected:
+ std::list<PacketSender*> packet_senders_;
+};
+
+INSTANTIATE_TEST_CASE_P(VideoSendersTest, DefaultBweTest,
+ ::testing::Values(MakeBweTestConfig(1, kAbsSendTime),
+ MakeBweTestConfig(3, kAbsSendTime),
+ MakeBweTestConfig(1, kTransmissionOffset),
+ MakeBweTestConfig(3, kTransmissionOffset)));
-TEST_P(BweTest, UnlimitedSpeed) {
+TEST_P(DefaultBweTest, UnlimitedSpeed) {
VerboseLogging(false);
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, SteadyLoss) {
+TEST_P(DefaultBweTest, DISABLED_SteadyLoss) {
LossFilter loss(this);
loss.SetLoss(20.0);
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingLoss1) {
+TEST_P(DefaultBweTest, IncreasingLoss1) {
LossFilter loss(this);
for (int i = 0; i < 76; ++i) {
loss.SetLoss(i);
@@ -99,13 +104,13 @@ TEST_P(BweTest, IncreasingLoss1) {
}
}
-TEST_P(BweTest, SteadyDelay) {
+TEST_P(DefaultBweTest, SteadyDelay) {
DelayFilter delay(this);
delay.SetDelay(1000);
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingDelay1) {
+TEST_P(DefaultBweTest, DISABLED_IncreasingDelay1) {
DelayFilter delay(this);
RunFor(10 * 60 * 1000);
for (int i = 0; i < 30 * 2; ++i) {
@@ -115,7 +120,7 @@ TEST_P(BweTest, IncreasingDelay1) {
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingDelay2) {
+TEST_P(DefaultBweTest, IncreasingDelay2) {
DelayFilter delay(this);
RateCounterFilter counter(this);
RunFor(1 * 60 * 1000);
@@ -127,7 +132,7 @@ TEST_P(BweTest, IncreasingDelay2) {
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, JumpyDelay1) {
+TEST_P(DefaultBweTest, JumpyDelay1) {
DelayFilter delay(this);
RunFor(10 * 60 * 1000);
for (int i = 1; i < 200; ++i) {
@@ -140,14 +145,14 @@ TEST_P(BweTest, JumpyDelay1) {
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, SteadyJitter) {
+TEST_P(DefaultBweTest, SteadyJitter) {
JitterFilter jitter(this);
RateCounterFilter counter(this);
jitter.SetJitter(20);
RunFor(2 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingJitter1) {
+TEST_P(DefaultBweTest, IncreasingJitter1) {
JitterFilter jitter(this);
for (int i = 0; i < 2 * 60 * 2; ++i) {
jitter.SetJitter(i);
@@ -156,7 +161,7 @@ TEST_P(BweTest, IncreasingJitter1) {
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingJitter2) {
+TEST_P(DefaultBweTest, IncreasingJitter2) {
JitterFilter jitter(this);
RunFor(30 * 1000);
for (int i = 1; i < 51; ++i) {
@@ -167,13 +172,13 @@ TEST_P(BweTest, IncreasingJitter2) {
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, SteadyReorder) {
+TEST_P(DefaultBweTest, SteadyReorder) {
ReorderFilter reorder(this);
reorder.SetReorder(20.0);
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingReorder1) {
+TEST_P(DefaultBweTest, IncreasingReorder1) {
ReorderFilter reorder(this);
for (int i = 0; i < 76; ++i) {
reorder.SetReorder(i);
@@ -181,13 +186,13 @@ TEST_P(BweTest, IncreasingReorder1) {
}
}
-TEST_P(BweTest, SteadyChoke) {
+TEST_P(DefaultBweTest, DISABLED_SteadyChoke) {
ChokeFilter choke(this);
choke.SetCapacity(140);
RunFor(10 * 60 * 1000);
}
-TEST_P(BweTest, IncreasingChoke1) {
+TEST_P(DefaultBweTest, DISABLED_IncreasingChoke1) {
ChokeFilter choke(this);
for (int i = 1200; i >= 100; i -= 100) {
choke.SetCapacity(i);
@@ -195,7 +200,7 @@ TEST_P(BweTest, IncreasingChoke1) {
}
}
-TEST_P(BweTest, IncreasingChoke2) {
+TEST_P(DefaultBweTest, DISABLED_IncreasingChoke2) {
ChokeFilter choke(this);
RunFor(60 * 1000);
for (int i = 1200; i >= 100; i -= 20) {
@@ -204,7 +209,7 @@ TEST_P(BweTest, IncreasingChoke2) {
}
}
-TEST_P(BweTest, Multi1) {
+TEST_P(DefaultBweTest, DISABLED_Multi1) {
DelayFilter delay(this);
ChokeFilter choke(this);
RateCounterFilter counter(this);
@@ -219,7 +224,7 @@ TEST_P(BweTest, Multi1) {
RunFor(5 * 60 * 1000);
}
-TEST_P(BweTest, Multi2) {
+TEST_P(DefaultBweTest, Multi2) {
ChokeFilter choke(this);
JitterFilter jitter(this);
RateCounterFilter counter(this);
@@ -227,6 +232,108 @@ TEST_P(BweTest, Multi2) {
jitter.SetJitter(120);
RunFor(5 * 60 * 1000);
}
+
+// This test fixture is used to instantiate tests running with adaptive video
+// senders.
+class BweFeedbackTest : public BweTest,
+ public ::testing::TestWithParam<BweTestConfig> {
+ public:
+ BweFeedbackTest() : BweTest() {}
+ virtual ~BweFeedbackTest() {}
+
+ virtual void SetUp() {
+ BweTestConfig config;
+ config.estimator_configs.push_back(EstimatorConfigs(kAbsSendTime, 0));
+ SetupTestFromConfig(config);
+ }
+
+ void PrintResults(double max_throughput_kbps, Stats<double> throughput_kbps,
+ Stats<double> delay_ms) {
+ double utilization = throughput_kbps.GetMean() / max_throughput_kbps;
+ webrtc::test::PrintResult("BwePerformance",
+ GetTestName(),
+ "Utilization",
+ utilization * 100.0,
+ "%",
+ false);
+ std::stringstream ss;
+ ss << throughput_kbps.GetStdDev() / throughput_kbps.GetMean();
+ webrtc::test::PrintResult("BwePerformance",
+ GetTestName(),
+ "Utilization var coeff",
+ ss.str(),
+ "",
+ false);
+ webrtc::test::PrintResult("BwePerformance",
+ GetTestName(),
+ "Average delay",
+ delay_ms.AsString(),
+ "ms",
+ false);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(BweFeedbackTest);
+};
+
+TEST_F(BweFeedbackTest, Choke1000kbps500kbps1000kbps) {
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ ChokeFilter filter(this);
+ RateCounterFilter counter(this, "receiver_input");
+ const int kHighCapacityKbps = 1000;
+ const int kLowCapacityKbps = 500;
+ filter.SetCapacity(kHighCapacityKbps);
+ filter.SetMaxDelay(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(kLowCapacityKbps);
+ RunFor(60 * 1000);
+ filter.SetCapacity(kHighCapacityKbps);
+ RunFor(60 * 1000);
+ PrintResults((2 * kHighCapacityKbps + kLowCapacityKbps) / 3.0,
+ counter.GetBitrateStats(), filter.GetDelayStats());
+}
+
+TEST_F(BweFeedbackTest, Choke200kbps30kbps200kbps) {
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ ChokeFilter filter(this);
+ RateCounterFilter counter(this, "receiver_input");
+ const int kHighCapacityKbps = 200;
+ const int kLowCapacityKbps = 30;
+ filter.SetCapacity(kHighCapacityKbps);
+ filter.SetMaxDelay(500);
+ RunFor(60 * 1000);
+ filter.SetCapacity(kLowCapacityKbps);
+ RunFor(60 * 1000);
+ filter.SetCapacity(kHighCapacityKbps);
+ RunFor(60 * 1000);
+
+ PrintResults((2 * kHighCapacityKbps + kLowCapacityKbps) / 3.0,
+ counter.GetBitrateStats(), filter.GetDelayStats());
+}
+
+TEST_F(BweFeedbackTest, Verizon4gDownlinkTest) {
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ RateCounterFilter counter1(this, "sender_output");
+ TraceBasedDeliveryFilter filter(this, "link_capacity");
+ RateCounterFilter counter2(this, "receiver_input");
+ ASSERT_TRUE(filter.Init(test::ResourcePath("verizon4g-downlink", "rx")));
+ RunFor(22 * 60 * 1000);
+ PrintResults(filter.GetBitrateStats().GetMean(), counter2.GetBitrateStats(),
+ filter.GetDelayStats());
+}
+
+// webrtc:3277
+TEST_F(BweFeedbackTest, DISABLED_GoogleWifiTrace3Mbps) {
+ AdaptiveVideoSender sender(0, this, 30, 300, 0, 0);
+ RateCounterFilter counter1(this, "sender_output");
+ TraceBasedDeliveryFilter filter(this, "link_capacity");
+ filter.SetMaxDelay(500);
+ RateCounterFilter counter2(this, "receiver_input");
+ ASSERT_TRUE(filter.Init(test::ResourcePath("google-wifi-3mbps", "rx")));
+ RunFor(300 * 1000);
+ PrintResults(filter.GetBitrateStats().GetMean(), counter2.GetBitrateStats(),
+ filter.GetDelayStats());
+}
} // namespace bwe
} // namespace testing
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc
index 994abdbee89..dda36a765e9 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.cc
@@ -122,8 +122,6 @@ RateControlRegion RemoteRateControl::Update(const RateControlInput* input,
}
updated_ = true;
current_input_ = *input;
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: Incoming rate = %u kbps",
- input->_incomingBitRate/1000);
return rate_control_region_;
}
@@ -158,18 +156,11 @@ uint32_t RemoteRateControl::ChangeBitRate(uint32_t current_bit_rate,
ChangeRegion(kRcAboveMax);
}
}
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "BWE: Response time: %f + %i + 10*33\n",
- avg_change_period_, rtt_);
const uint32_t response_time = static_cast<uint32_t>(avg_change_period_ +
0.5f) + rtt_ + 300;
double alpha = RateIncreaseFactor(now_ms, last_bit_rate_change_,
response_time, noise_var);
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "BWE: avg_change_period_ = %f ms; RTT = %u ms", avg_change_period_,
- rtt_);
-
current_bit_rate = static_cast<uint32_t>(current_bit_rate * alpha) + 1000;
if (max_hold_rate_ > 0 && beta_ * max_hold_rate_ > current_bit_rate) {
current_bit_rate = static_cast<uint32_t>(beta_ * max_hold_rate_);
@@ -178,9 +169,6 @@ uint32_t RemoteRateControl::ChangeBitRate(uint32_t current_bit_rate,
recovery = true;
}
max_hold_rate_ = 0;
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "BWE: Increase rate to current_bit_rate = %u kbps",
- current_bit_rate / 1000);
last_bit_rate_change_ = now_ms;
break;
}
@@ -207,10 +195,6 @@ uint32_t RemoteRateControl::ChangeBitRate(uint32_t current_bit_rate,
}
UpdateMaxBitRateEstimate(incoming_bit_rate_kbps);
-
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "BWE: Decrease rate to current_bit_rate = %u kbps",
- current_bit_rate / 1000);
}
// Stay on hold until the pipes are cleared.
ChangeState(kRcHold);
@@ -251,8 +235,6 @@ double RemoteRateControl::RateIncreaseFactor(int64_t now_ms,
alpha = 1.3;
}
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: alpha = %f", alpha);
-
if (last_ms > -1) {
alpha = pow(alpha, (now_ms - last_ms) / 1000.0);
}
@@ -341,45 +323,5 @@ void RemoteRateControl::ChangeRegion(RateControlRegion region) {
void RemoteRateControl::ChangeState(RateControlState new_state) {
came_from_state_ = rate_control_state_;
rate_control_state_ = new_state;
- char state1[15];
- char state2[15];
- char state3[15];
- StateStr(came_from_state_, state1);
- StateStr(rate_control_state_, state2);
- StateStr(current_input_._bwState, state3);
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "\t%s => %s due to %s\n", state1, state2, state3);
-}
-
-void RemoteRateControl::StateStr(RateControlState state, char* str) {
- switch (state) {
- case kRcDecrease:
- strncpy(str, "DECREASE", 9);
- break;
- case kRcHold:
- strncpy(str, "HOLD", 5);
- break;
- case kRcIncrease:
- strncpy(str, "INCREASE", 9);
- break;
- default:
- assert(false);
- }
-}
-
-void RemoteRateControl::StateStr(BandwidthUsage state, char* str) {
- switch (state) {
- case kBwNormal:
- strncpy(str, "NORMAL", 7);
- break;
- case kBwOverusing:
- strncpy(str, "OVER USING", 11);
- break;
- case kBwUnderusing:
- strncpy(str, "UNDER USING", 12);
- break;
- default:
- assert(false);
- }
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h
index b525834eca1..d02c6d56b50 100644
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/remote_rate_control.h
@@ -53,8 +53,6 @@ class RemoteRateControl {
void ChangeState(const RateControlInput& input, int64_t now_ms);
void ChangeState(RateControlState new_state);
void ChangeRegion(RateControlRegion region);
- static void StateStr(RateControlState state, char* str);
- static void StateStr(BandwidthUsage state, char* str);
uint32_t min_configured_bit_rate_;
uint32_t max_configured_bit_rate_;
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc
deleted file mode 100644
index 109edae7cc5..00000000000
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp.cc
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h"
-
-#include "webrtc/system_wrappers/interface/clock.h"
-
-#include <assert.h>
-
-namespace webrtc {
-
-namespace synchronization {
-
-RtcpMeasurement::RtcpMeasurement()
- : ntp_secs(0), ntp_frac(0), rtp_timestamp(0) {}
-
-RtcpMeasurement::RtcpMeasurement(uint32_t ntp_secs, uint32_t ntp_frac,
- uint32_t timestamp)
- : ntp_secs(ntp_secs), ntp_frac(ntp_frac), rtp_timestamp(timestamp) {}
-
-// Calculates the RTP timestamp frequency from two pairs of NTP and RTP
-// timestamps.
-bool CalculateFrequency(
- int64_t rtcp_ntp_ms1,
- uint32_t rtp_timestamp1,
- int64_t rtcp_ntp_ms2,
- uint32_t rtp_timestamp2,
- double* frequency_khz) {
- if (rtcp_ntp_ms1 <= rtcp_ntp_ms2) {
- return false;
- }
- *frequency_khz = static_cast<double>(rtp_timestamp1 - rtp_timestamp2) /
- static_cast<double>(rtcp_ntp_ms1 - rtcp_ntp_ms2);
- return true;
-}
-
-// Detects if there has been a wraparound between |old_timestamp| and
-// |new_timestamp|, and compensates by adding 2^32 if that is the case.
-bool CompensateForWrapAround(uint32_t new_timestamp,
- uint32_t old_timestamp,
- int64_t* compensated_timestamp) {
- assert(compensated_timestamp);
- int64_t wraps = synchronization::CheckForWrapArounds(new_timestamp,
- old_timestamp);
- if (wraps < 0) {
- // Reordering, don't use this packet.
- return false;
- }
- *compensated_timestamp = new_timestamp + (wraps << 32);
- return true;
-}
-
-// Converts |rtp_timestamp| to the NTP time base using the NTP and RTP timestamp
-// pairs in |rtcp|. The converted timestamp is returned in
-// |rtp_timestamp_in_ms|. This function compensates for wrap arounds in RTP
-// timestamps and returns false if it can't do the conversion due to reordering.
-bool RtpToNtpMs(int64_t rtp_timestamp,
- const synchronization::RtcpList& rtcp,
- int64_t* rtp_timestamp_in_ms) {
- assert(rtcp.size() == 2);
- int64_t rtcp_ntp_ms_new = Clock::NtpToMs(rtcp.front().ntp_secs,
- rtcp.front().ntp_frac);
- int64_t rtcp_ntp_ms_old = Clock::NtpToMs(rtcp.back().ntp_secs,
- rtcp.back().ntp_frac);
- int64_t rtcp_timestamp_new = rtcp.front().rtp_timestamp;
- int64_t rtcp_timestamp_old = rtcp.back().rtp_timestamp;
- if (!CompensateForWrapAround(rtcp_timestamp_new,
- rtcp_timestamp_old,
- &rtcp_timestamp_new)) {
- return false;
- }
- double freq_khz;
- if (!CalculateFrequency(rtcp_ntp_ms_new,
- rtcp_timestamp_new,
- rtcp_ntp_ms_old,
- rtcp_timestamp_old,
- &freq_khz)) {
- return false;
- }
- double offset = rtcp_timestamp_new - freq_khz * rtcp_ntp_ms_new;
- int64_t rtp_timestamp_unwrapped;
- if (!CompensateForWrapAround(rtp_timestamp, rtcp_timestamp_old,
- &rtp_timestamp_unwrapped)) {
- return false;
- }
- double rtp_timestamp_ntp_ms = (static_cast<double>(rtp_timestamp_unwrapped) -
- offset) / freq_khz + 0.5f;
- if (rtp_timestamp_ntp_ms < 0) {
- return false;
- }
- *rtp_timestamp_in_ms = rtp_timestamp_ntp_ms;
- return true;
-}
-
-int CheckForWrapArounds(uint32_t new_timestamp, uint32_t old_timestamp) {
- if (new_timestamp < old_timestamp) {
- // This difference should be less than -2^31 if we have had a wrap around
- // (e.g. |new_timestamp| = 1, |rtcp_rtp_timestamp| = 2^32 - 1). Since it is
- // cast to a int32_t, it should be positive.
- if (static_cast<int32_t>(new_timestamp - old_timestamp) > 0) {
- // Forward wrap around.
- return 1;
- }
- } else if (static_cast<int32_t>(old_timestamp - new_timestamp) > 0) {
- // This difference should be less than -2^31 if we have had a backward wrap
- // around. Since it is cast to a int32_t, it should be positive.
- return -1;
- }
- return 0;
-}
-} // namespace synchronization
-} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp_unittest.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp_unittest.cc
deleted file mode 100644
index aff314aaa53..00000000000
--- a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/rtp_to_ntp_unittest.cc
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testing/gtest/include/gtest/gtest.h"
-#include "webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h"
-
-namespace webrtc {
-
-TEST(WrapAroundTests, NoWrap) {
- EXPECT_EQ(0, synchronization::CheckForWrapArounds(0xFFFFFFFF, 0xFFFFFFFE));
- EXPECT_EQ(0, synchronization::CheckForWrapArounds(1, 0));
- EXPECT_EQ(0, synchronization::CheckForWrapArounds(0x00010000, 0x0000FFFF));
-}
-
-TEST(WrapAroundTests, ForwardWrap) {
- EXPECT_EQ(1, synchronization::CheckForWrapArounds(0, 0xFFFFFFFF));
- EXPECT_EQ(1, synchronization::CheckForWrapArounds(0, 0xFFFF0000));
- EXPECT_EQ(1, synchronization::CheckForWrapArounds(0x0000FFFF, 0xFFFFFFFF));
- EXPECT_EQ(1, synchronization::CheckForWrapArounds(0x0000FFFF, 0xFFFF0000));
-}
-
-TEST(WrapAroundTests, BackwardWrap) {
- EXPECT_EQ(-1, synchronization::CheckForWrapArounds(0xFFFFFFFF, 0));
- EXPECT_EQ(-1, synchronization::CheckForWrapArounds(0xFFFF0000, 0));
- EXPECT_EQ(-1, synchronization::CheckForWrapArounds(0xFFFFFFFF, 0x0000FFFF));
- EXPECT_EQ(-1, synchronization::CheckForWrapArounds(0xFFFF0000, 0x0000FFFF));
-}
-
-TEST(WrapAroundTests, OldRtcpWrapped) {
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0;
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp -= kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp -= kTimestampTicksPerMs;
- int64_t timestamp_in_ms = -1;
- // This expected to fail since it's highly unlikely that the older RTCP
- // has a much smaller RTP timestamp than the newer.
- EXPECT_FALSE(synchronization::RtpToNtpMs(timestamp, rtcp, &timestamp_in_ms));
-}
-
-TEST(WrapAroundTests, NewRtcpWrapped) {
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0xFFFFFFFF;
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- int64_t timestamp_in_ms = -1;
- EXPECT_TRUE(synchronization::RtpToNtpMs(rtcp.back().rtp_timestamp, rtcp,
- &timestamp_in_ms));
- // Since this RTP packet has the same timestamp as the RTCP packet constructed
- // at time 0 it should be mapped to 0 as well.
- EXPECT_EQ(0, timestamp_in_ms);
-}
-
-TEST(WrapAroundTests, RtpWrapped) {
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0xFFFFFFFF - 2 * kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += kTimestampTicksPerMs;
- int64_t timestamp_in_ms = -1;
- EXPECT_TRUE(synchronization::RtpToNtpMs(timestamp, rtcp,
- &timestamp_in_ms));
- // Since this RTP packet has the same timestamp as the RTCP packet constructed
- // at time 0 it should be mapped to 0 as well.
- EXPECT_EQ(2, timestamp_in_ms);
-}
-
-TEST(WrapAroundTests, OldRtp_RtcpsWrapped) {
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp -= 2*kTimestampTicksPerMs;
- int64_t timestamp_in_ms = -1;
- EXPECT_FALSE(synchronization::RtpToNtpMs(timestamp, rtcp,
- &timestamp_in_ms));
-}
-
-TEST(WrapAroundTests, OldRtp_NewRtcpWrapped) {
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0xFFFFFFFF;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp -= kTimestampTicksPerMs;
- int64_t timestamp_in_ms = -1;
- EXPECT_TRUE(synchronization::RtpToNtpMs(timestamp, rtcp,
- &timestamp_in_ms));
- // Constructed at the same time as the first RTCP and should therefore be
- // mapped to zero.
- EXPECT_EQ(0, timestamp_in_ms);
-}
-
-TEST(WrapAroundTests, OldRtp_OldRtcpWrapped) {
- const uint32_t kOneMsInNtpFrac = 4294967;
- const uint32_t kTimestampTicksPerMs = 90;
- synchronization::RtcpList rtcp;
- uint32_t ntp_sec = 0;
- uint32_t ntp_frac = 0;
- uint32_t timestamp = 0;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp -= kTimestampTicksPerMs;
- rtcp.push_front(synchronization::RtcpMeasurement(ntp_sec, ntp_frac,
- timestamp));
- ntp_frac += kOneMsInNtpFrac;
- timestamp += 2*kTimestampTicksPerMs;
- int64_t timestamp_in_ms = -1;
- EXPECT_FALSE(synchronization::RtpToNtpMs(timestamp, rtcp,
- &timestamp_in_ms));
-}
-}; // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
new file mode 100644
index 00000000000..40fa6df8ffb
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.cc
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h"
+
+#include <stdio.h>
+#include <string>
+
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/video_coding/main/test/rtp_file_reader.h"
+#include "webrtc/modules/video_coding/main/test/rtp_player.h"
+
+using webrtc::rtpplayer::RtpPacketSourceInterface;
+
+const int kMinBitrateBps = 30000;
+
+bool ParseArgsAndSetupEstimator(int argc,
+ char** argv,
+ webrtc::Clock* clock,
+ webrtc::RemoteBitrateObserver* observer,
+ RtpPacketSourceInterface** rtp_reader,
+ webrtc::RtpHeaderParser** parser,
+ webrtc::RemoteBitrateEstimator** estimator,
+ std::string* estimator_used) {
+ *rtp_reader = webrtc::rtpplayer::CreateRtpFileReader(argv[3]);
+ if (!*rtp_reader) {
+ fprintf(stderr, "Cannot open input file %s\n", argv[3]);
+ return false;
+ }
+ fprintf(stderr, "Input file: %s\n\n", argv[3]);
+ webrtc::RTPExtensionType extension = webrtc::kRtpExtensionAbsoluteSendTime;
+
+ if (strncmp("tsoffset", argv[1], 8) == 0) {
+ extension = webrtc::kRtpExtensionTransmissionTimeOffset;
+ fprintf(stderr, "Extension: toffset\n");
+ } else {
+ fprintf(stderr, "Extension: abs\n");
+ }
+ int id = atoi(argv[2]);
+
+ // Setup the RTP header parser and the bitrate estimator.
+ *parser = webrtc::RtpHeaderParser::Create();
+ (*parser)->RegisterRtpHeaderExtension(extension, id);
+ if (estimator) {
+ switch (extension) {
+ case webrtc::kRtpExtensionAbsoluteSendTime: {
+ webrtc::AbsoluteSendTimeRemoteBitrateEstimatorFactory factory;
+ *estimator = factory.Create(observer, clock, webrtc::kAimdControl,
+ kMinBitrateBps);
+ *estimator_used = "AbsoluteSendTimeRemoteBitrateEstimator";
+ break;
+ }
+ case webrtc::kRtpExtensionTransmissionTimeOffset: {
+ webrtc::RemoteBitrateEstimatorFactory factory;
+ *estimator = factory.Create(observer, clock, webrtc::kAimdControl,
+ kMinBitrateBps);
+ *estimator_used = "RemoteBitrateEstimator";
+ break;
+ }
+ default:
+ assert(false);
+ }
+ }
+ return true;
+}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h
new file mode 100644
index 00000000000..714457d5668
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TOOLS_BWE_RTP_H_
+#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TOOLS_BWE_RTP_H_
+
+#include <string>
+
+namespace webrtc {
+class Clock;
+class RemoteBitrateEstimator;
+class RemoteBitrateObserver;
+class RtpHeaderParser;
+namespace rtpplayer {
+class RtpPacketSourceInterface;
+}
+}
+
+bool ParseArgsAndSetupEstimator(
+ int argc,
+ char** argv,
+ webrtc::Clock* clock,
+ webrtc::RemoteBitrateObserver* observer,
+ webrtc::rtpplayer::RtpPacketSourceInterface** rtp_reader,
+ webrtc::RtpHeaderParser** parser,
+ webrtc::RemoteBitrateEstimator** estimator,
+ std::string* estimator_used);
+
+#endif // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_TOOLS_BWE_RTP_H_
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
new file mode 100644
index 00000000000..9ea3f08eab5
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp_play.cc
@@ -0,0 +1,116 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include "webrtc/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/video_coding/main/test/rtp_file_reader.h"
+#include "webrtc/modules/video_coding/main/test/rtp_player.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+using webrtc::rtpplayer::RtpPacketSourceInterface;
+
+class Observer : public webrtc::RemoteBitrateObserver {
+ public:
+ explicit Observer(webrtc::Clock* clock) : clock_(clock) {}
+
+ // Called when a receive channel group has a new bitrate estimate for the
+ // incoming streams.
+ virtual void OnReceiveBitrateChanged(const std::vector<unsigned int>& ssrcs,
+ unsigned int bitrate) {
+ printf("[%u] Num SSRCs: %d, bitrate: %u\n",
+ static_cast<uint32_t>(clock_->TimeInMilliseconds()),
+ static_cast<int>(ssrcs.size()), bitrate);
+ }
+
+ virtual ~Observer() {}
+
+ private:
+ webrtc::Clock* clock_;
+};
+
+int main(int argc, char** argv) {
+ if (argc < 4) {
+ printf("Usage: bwe_rtp_play <extension type> <extension id> "
+ "<input_file.rtp>\n");
+ printf("<extension type> can either be:\n"
+ " abs for absolute send time or\n"
+ " tsoffset for timestamp offset.\n"
+ "<extension id> is the id associated with the extension.\n");
+ return -1;
+ }
+ RtpPacketSourceInterface* reader;
+ webrtc::RemoteBitrateEstimator* estimator;
+ webrtc::RtpHeaderParser* parser;
+ std::string estimator_used;
+ webrtc::SimulatedClock clock(0);
+ Observer observer(&clock);
+ if (!ParseArgsAndSetupEstimator(argc, argv, &clock, &observer, &reader,
+ &parser, &estimator, &estimator_used)) {
+ return -1;
+ }
+ webrtc::scoped_ptr<RtpPacketSourceInterface> rtp_reader(reader);
+ webrtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(parser);
+ webrtc::scoped_ptr<webrtc::RemoteBitrateEstimator> rbe(estimator);
+
+ // Process the file.
+ int packet_counter = 0;
+ int64_t next_process_time_ms = 0;
+ int64_t next_rtp_time_ms = 0;
+ int64_t first_rtp_time_ms = -1;
+ const uint32_t kMaxPacketSize = 1500;
+ uint8_t packet_buffer[kMaxPacketSize];
+ uint8_t* packet = packet_buffer;
+ int non_zero_abs_send_time = 0;
+ int non_zero_ts_offsets = 0;
+ while (true) {
+ uint32_t next_rtp_time;
+ if (next_rtp_time_ms <= clock.TimeInMilliseconds()) {
+ uint32_t packet_length = kMaxPacketSize;
+ if (rtp_reader->NextPacket(packet, &packet_length,
+ &next_rtp_time) == -1) {
+ break;
+ }
+ if (first_rtp_time_ms == -1)
+ first_rtp_time_ms = next_rtp_time;
+ next_rtp_time_ms = next_rtp_time - first_rtp_time_ms;
+ webrtc::RTPHeader header;
+ parser->Parse(packet, packet_length, &header);
+ if (header.extension.absoluteSendTime != 0)
+ ++non_zero_abs_send_time;
+ if (header.extension.transmissionTimeOffset != 0)
+ ++non_zero_ts_offsets;
+ rbe->IncomingPacket(clock.TimeInMilliseconds(),
+ packet_length - header.headerLength,
+ header);
+ ++packet_counter;
+ }
+ next_process_time_ms = rbe->TimeUntilNextProcess() +
+ clock.TimeInMilliseconds();
+ if (next_process_time_ms <= clock.TimeInMilliseconds()) {
+ rbe->Process();
+ }
+ int time_until_next_event =
+ std::min(next_process_time_ms, next_rtp_time_ms) -
+ clock.TimeInMilliseconds();
+ clock.AdvanceTimeMilliseconds(std::max(time_until_next_event, 0));
+ }
+ printf("Parsed %d packets\nTime passed: %u ms\n", packet_counter,
+ static_cast<uint32_t>(clock.TimeInMilliseconds()));
+ printf("Estimator used: %s\n", estimator_used.c_str());
+ printf("Packets with non-zero absolute send time: %d\n",
+ non_zero_abs_send_time);
+ printf("Packets with non-zero timestamp offset: %d\n",
+ non_zero_ts_offsets);
+ return 0;
+}
diff --git a/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc
new file mode 100644
index 00000000000..af4a4d4ee7b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/remote_bitrate_estimator/tools/rtp_to_text.cc
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <sstream>
+
+#include "webrtc/modules/remote_bitrate_estimator/tools/bwe_rtp.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
+#include "webrtc/modules/video_coding/main/test/rtp_file_reader.h"
+#include "webrtc/modules/video_coding/main/test/rtp_player.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+using webrtc::rtpplayer::RtpPacketSourceInterface;
+
+int main(int argc, char** argv) {
+ if (argc < 4) {
+ fprintf(stderr, "Usage: rtp_to_text <extension type> <extension id>"
+ " <input_file.rtp> [-t]\n");
+ fprintf(stderr, "<extension type> can either be:\n"
+ " abs for absolute send time or\n"
+ " tsoffset for timestamp offset.\n"
+ "<extension id> is the id associated with the extension.\n"
+ " -t is an optional flag, if set only packet arrival time will be"
+ " output.\n");
+ return -1;
+ }
+ RtpPacketSourceInterface* reader;
+ webrtc::RtpHeaderParser* parser;
+ if (!ParseArgsAndSetupEstimator(argc, argv, NULL, NULL, &reader, &parser,
+ NULL, NULL)) {
+ return -1;
+ }
+ bool arrival_time_only = (argc >= 5 && strncmp(argv[4], "-t", 2) == 0);
+ webrtc::scoped_ptr<RtpPacketSourceInterface> rtp_reader(reader);
+ webrtc::scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(parser);
+ fprintf(stdout, "seqnum timestamp ts_offset abs_sendtime recvtime "
+ "markerbit ssrc size\n");
+ int packet_counter = 0;
+ static const uint32_t kMaxPacketSize = 1500;
+ uint8_t packet_buffer[kMaxPacketSize];
+ uint8_t* packet = packet_buffer;
+ uint32_t packet_length = kMaxPacketSize;
+ uint32_t time_ms = 0;
+ int non_zero_abs_send_time = 0;
+ int non_zero_ts_offsets = 0;
+ while (rtp_reader->NextPacket(packet, &packet_length, &time_ms) == 0) {
+ webrtc::RTPHeader header;
+ parser->Parse(packet, packet_length, &header);
+ if (header.extension.absoluteSendTime != 0)
+ ++non_zero_abs_send_time;
+ if (header.extension.transmissionTimeOffset != 0)
+ ++non_zero_ts_offsets;
+ if (arrival_time_only) {
+ std::stringstream ss;
+ ss << static_cast<int64_t>(time_ms) * 1000000;
+ fprintf(stdout, "%s\n", ss.str().c_str());
+ } else {
+ fprintf(stdout, "%u %u %d %u %u %d %u %u\n", header.sequenceNumber,
+ header.timestamp, header.extension.transmissionTimeOffset,
+ header.extension.absoluteSendTime, time_ms, header.markerBit,
+ header.ssrc, packet_length);
+ }
+ packet_length = kMaxPacketSize;
+ ++packet_counter;
+ }
+ fprintf(stderr, "Parsed %d packets\n", packet_counter);
+ fprintf(stderr, "Packets with non-zero absolute send time: %d\n",
+ non_zero_abs_send_time);
+ fprintf(stderr, "Packets with non-zero timestamp offset: %d\n",
+ non_zero_ts_offsets);
+ return 0;
+}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/fec_receiver.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/fec_receiver.h
index 97b200f0777..e2ef4b1e972 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/fec_receiver.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/fec_receiver.h
@@ -18,7 +18,7 @@ namespace webrtc {
class FecReceiver {
public:
- static FecReceiver* Create(int32_t id, RtpData* callback);
+ static FecReceiver* Create(RtpData* callback);
virtual ~FecReceiver() {}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/receive_statistics.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/receive_statistics.h
index 707adaa0cd3..6f2ea4fb3e6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/receive_statistics.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/receive_statistics.h
@@ -23,24 +23,9 @@ class Clock;
class StreamStatistician {
public:
- struct Statistics {
- Statistics()
- : fraction_lost(0),
- cumulative_lost(0),
- extended_max_sequence_number(0),
- jitter(0),
- max_jitter(0) {}
-
- uint8_t fraction_lost;
- uint32_t cumulative_lost;
- uint32_t extended_max_sequence_number;
- uint32_t jitter;
- uint32_t max_jitter;
- };
-
virtual ~StreamStatistician();
- virtual bool GetStatistics(Statistics* statistics, bool reset) = 0;
+ virtual bool GetStatistics(RtcpStatistics* statistics, bool reset) = 0;
virtual void GetDataCounters(uint32_t* bytes_received,
uint32_t* packets_received) const = 0;
virtual uint32_t BitrateReceived() const = 0;
@@ -66,9 +51,13 @@ class ReceiveStatistics : public Module {
static ReceiveStatistics* Create(Clock* clock);
// Updates the receive statistics with this packet.
- virtual void IncomingPacket(const RTPHeader& rtp_header, size_t bytes,
+ virtual void IncomingPacket(const RTPHeader& rtp_header,
+ size_t bytes,
bool retransmitted) = 0;
+ // Increment counter for number of FEC packets received.
+ virtual void FecPacketReceived(uint32_t ssrc) = 0;
+
// Returns a map of all statisticians which have seen an incoming packet
// during the last two seconds.
virtual StatisticianMap GetActiveStatisticians() const = 0;
@@ -78,17 +67,31 @@ class ReceiveStatistics : public Module {
// Sets the max reordering threshold in number of packets.
virtual void SetMaxReorderingThreshold(int max_reordering_threshold) = 0;
+
+ // Called on new RTCP stats creation.
+ virtual void RegisterRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) = 0;
+
+ // Called on new RTP stats creation.
+ virtual void RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) = 0;
};
class NullReceiveStatistics : public ReceiveStatistics {
public:
- virtual void IncomingPacket(const RTPHeader& rtp_header, size_t bytes,
+ virtual void IncomingPacket(const RTPHeader& rtp_header,
+ size_t bytes,
bool retransmitted) OVERRIDE;
+ virtual void FecPacketReceived(uint32_t ssrc) OVERRIDE;
virtual StatisticianMap GetActiveStatisticians() const OVERRIDE;
virtual StreamStatistician* GetStatistician(uint32_t ssrc) const OVERRIDE;
virtual int32_t TimeUntilNextProcess() OVERRIDE;
virtual int32_t Process() OVERRIDE;
virtual void SetMaxReorderingThreshold(int max_reordering_threshold) OVERRIDE;
+ virtual void RegisterRtcpStatisticsCallback(RtcpStatisticsCallback* callback)
+ OVERRIDE;
+ virtual void RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) OVERRIDE;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h
new file mode 100644
index 00000000000..25f0f2ecf98
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_REMOTE_NTP_TIME_ESTIMATOR_H_
+#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_REMOTE_NTP_TIME_ESTIMATOR_H_
+
+#include "webrtc/system_wrappers/interface/rtp_to_ntp.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class Clock;
+class RtpRtcp;
+class TimestampExtrapolator;
+
+// RemoteNtpTimeEstimator can be used to estimate a given RTP timestamp's NTP
+// time in local timebase.
+// Note that it needs to be trained with at least 2 RTCP SR (by calling
+// |UpdateRtcpTimestamp|) before it can be used.
+class RemoteNtpTimeEstimator {
+ public:
+ explicit RemoteNtpTimeEstimator(Clock* clock);
+
+ ~RemoteNtpTimeEstimator();
+
+ // Updates the estimator with the timestamp from newly received RTCP SR for
+ // |ssrc|. The RTCP SR is read from |rtp_rtcp|.
+ bool UpdateRtcpTimestamp(uint32_t ssrc, RtpRtcp* rtp_rtcp);
+
+ // Estimates the NTP timestamp in local timebase from |rtp_timestamp|.
+ // Returns the NTP timestamp in ms when success. -1 if failed.
+ int64_t Estimate(uint32_t rtp_timestamp);
+
+ private:
+ Clock* clock_;
+ scoped_ptr<TimestampExtrapolator> ts_extrapolator_;
+ RtcpList rtcp_list_;
+ DISALLOW_COPY_AND_ASSIGN(RemoteNtpTimeEstimator);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_REMOTE_NTP_TIME_ESTIMATOR_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h
index 3ea4dcd1beb..965f4b02421 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h
@@ -54,8 +54,7 @@ class RTPPayloadStrategy {
class RTPPayloadRegistry {
public:
// The registry takes ownership of the strategy.
- RTPPayloadRegistry(const int32_t id,
- RTPPayloadStrategy* rtp_payload_strategy);
+ RTPPayloadRegistry(RTPPayloadStrategy* rtp_payload_strategy);
~RTPPayloadRegistry();
int32_t RegisterReceivePayload(
@@ -76,10 +75,10 @@ class RTPPayloadRegistry {
const uint32_t rate,
int8_t* payload_type) const;
- void SetRtxStatus(bool enable, uint32_t ssrc);
-
bool RtxEnabled() const;
+ void SetRtxSsrc(uint32_t ssrc);
+
void SetRtxPayloadType(int payload_type);
bool IsRtx(const RTPHeader& header) const;
@@ -153,7 +152,6 @@ class RTPPayloadRegistry {
scoped_ptr<CriticalSectionWrapper> crit_sect_;
ModuleRTPUtility::PayloadTypeMap payload_type_map_;
- int32_t id_;
scoped_ptr<RTPPayloadStrategy> rtp_payload_strategy_;
int8_t red_payload_type_;
int8_t ulpfec_payload_type_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h
index 67dad0d977b..95c565f01bf 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h
@@ -213,7 +213,7 @@ class RtpRtcp : public Module {
*
* return -1 on failure else 0
*/
- virtual int32_t SetSSRC(const uint32_t ssrc) = 0;
+ virtual void SetSSRC(const uint32_t ssrc) = 0;
/*
* Get CSRC
@@ -249,10 +249,14 @@ class RtpRtcp : public Module {
virtual int32_t SetCSRCStatus(const bool include) = 0;
/*
- * Turn on/off sending RTX (RFC 4588) on a specific SSRC.
+ * Turn on/off sending RTX (RFC 4588). The modes can be set as a combination
+ * of values of the enumerator RtxMode.
*/
- virtual int32_t SetRTXSendStatus(int modes, bool set_ssrc,
- uint32_t ssrc) = 0;
+ virtual void SetRTXSendStatus(int modes) = 0;
+
+ // Sets the SSRC to use when sending RTX packets. This doesn't enable RTX,
+ // only the SSRC is set.
+ virtual void SetRtxSsrc(uint32_t ssrc) = 0;
// Sets the payload type to use when sending RTX packets. Note that this
// doesn't enable RTX, only the payload type is set.
@@ -261,8 +265,8 @@ class RtpRtcp : public Module {
/*
* Get status of sending RTX (RFC 4588) on a specific SSRC.
*/
- virtual int32_t RTXSendStatus(int* modes, uint32_t* ssrc,
- int* payloadType) const = 0;
+ virtual void RTXSendStatus(int* modes, uint32_t* ssrc,
+ int* payloadType) const = 0;
/*
* sends kRtcpByeCode when going from true to false
@@ -508,6 +512,13 @@ class RtpRtcp : public Module {
virtual int32_t RemoveRTCPReportBlock(const uint32_t SSRC) = 0;
/*
+ * Get number of sent and received RTCP packet types.
+ */
+ virtual void GetRtcpPacketTypeCounters(
+ RtcpPacketTypeCounter* packets_sent,
+ RtcpPacketTypeCounter* packets_received) const = 0;
+
+ /*
* (APP) Application specific data
*
* return -1 on failure else 0
@@ -664,25 +675,6 @@ class RtpRtcp : public Module {
int8_t& payloadType) const = 0;
/*
- * Set status and ID for header-extension-for-audio-level-indication.
- * See http://tools.ietf.org/html/rfc6464 for more details.
- *
- * return -1 on failure else 0
- */
- virtual int32_t SetRTPAudioLevelIndicationStatus(
- const bool enable,
- const uint8_t ID) = 0;
-
- /*
- * Get status and ID for header-extension-for-audio-level-indication.
- *
- * return -1 on failure else 0
- */
- virtual int32_t GetRTPAudioLevelIndicationStatus(
- bool& enable,
- uint8_t& ID) const = 0;
-
- /*
* Store the audio level in dBov for header-extension-for-audio-level-
* indication.
* This API shall be called before transmision of an RTP packet to ensure
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
index b66e927ae59..6f99f938de6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
@@ -27,6 +27,9 @@ namespace webrtc {
const int kVideoPayloadTypeFrequency = 90000;
+// Minimum RTP header size in bytes.
+const uint8_t kRtpHeaderSize = 12;
+
struct AudioPayload
{
uint32_t frequency;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
index 42c7b4eab91..03156c79df4 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -77,17 +77,19 @@ class MockRtpRtcp : public RtpRtcp {
MOCK_CONST_METHOD0(SSRC,
uint32_t());
MOCK_METHOD1(SetSSRC,
- int32_t(const uint32_t ssrc));
+ void(const uint32_t ssrc));
MOCK_CONST_METHOD1(CSRCs,
int32_t(uint32_t arrOfCSRC[kRtpCsrcSize]));
MOCK_METHOD2(SetCSRCs,
int32_t(const uint32_t arrOfCSRC[kRtpCsrcSize], const uint8_t arrLength));
MOCK_METHOD1(SetCSRCStatus,
int32_t(const bool include));
- MOCK_METHOD3(SetRTXSendStatus,
- int32_t(int modes, bool setSSRC, uint32_t ssrc));
+ MOCK_METHOD1(SetRTXSendStatus,
+ void(int modes));
MOCK_CONST_METHOD3(RTXSendStatus,
- int32_t(int* modes, uint32_t* ssrc, int* payload_type));
+ void(int* modes, uint32_t* ssrc, int* payload_type));
+ MOCK_METHOD1(SetRtxSsrc,
+ void(uint32_t));
MOCK_METHOD1(SetRtxSendPayloadType,
void(int));
MOCK_METHOD1(SetSendingStatus,
@@ -168,6 +170,8 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(const uint32_t SSRC, const RTCPReportBlock* receiveBlock));
MOCK_METHOD1(RemoveRTCPReportBlock,
int32_t(const uint32_t SSRC));
+ MOCK_CONST_METHOD2(GetRtcpPacketTypeCounters,
+ void(RtcpPacketTypeCounter*, RtcpPacketTypeCounter*));
MOCK_METHOD4(SetRTCPApplicationSpecificData,
int32_t(const uint8_t subType, const uint32_t name, const uint8_t* data, const uint16_t length));
MOCK_METHOD1(SetRTCPVoIPMetrics,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/OWNERS b/chromium/third_party/webrtc/modules/rtp_rtcp/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
index 0dc142f867c..0d6c174a3b9 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.cc
@@ -16,20 +16,19 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
// RFC 5109
namespace webrtc {
-FecReceiver* FecReceiver::Create(int32_t id, RtpData* callback) {
- return new FecReceiverImpl(id, callback);
+FecReceiver* FecReceiver::Create(RtpData* callback) {
+ return new FecReceiverImpl(callback);
}
-FecReceiverImpl::FecReceiverImpl(const int32_t id, RtpData* callback)
- : id_(id),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+FecReceiverImpl::FecReceiverImpl(RtpData* callback)
+ : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
recovered_packet_callback_(callback),
- fec_(new ForwardErrorCorrection(id)) {}
+ fec_(new ForwardErrorCorrection()) {}
FecReceiverImpl::~FecReceiverImpl() {
while (!received_packet_list_.empty()) {
@@ -103,8 +102,7 @@ int32_t FecReceiverImpl::AddReceivedRedPacket(
if (timestamp_offset != 0) {
// |timestampOffset| should be 0. However, it's possible this is the first
// location a corrupt payload can be caught, so don't assert.
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Corrupt payload found in %s", __FUNCTION__);
+ LOG(LS_WARNING) << "Corrupt payload found.";
delete received_packet;
return -1;
}
@@ -223,6 +221,7 @@ int32_t FecReceiverImpl::ProcessReceivedFec() {
crit_sect_->Enter();
}
if (fec_->DecodeFEC(&received_packet_list_, &recovered_packet_list_) != 0) {
+ crit_sect_->Leave();
return -1;
}
assert(received_packet_list_.empty());
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
index 03421235c3d..b876bedc9c3 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_impl.h
@@ -25,7 +25,7 @@ class CriticalSectionWrapper;
class FecReceiverImpl : public FecReceiver {
public:
- FecReceiverImpl(const int32_t id, RtpData* callback);
+ FecReceiverImpl(RtpData* callback);
virtual ~FecReceiverImpl();
virtual int32_t AddReceivedRedPacket(const RTPHeader& rtp_header,
@@ -36,7 +36,6 @@ class FecReceiverImpl : public FecReceiver {
virtual int32_t ProcessReceivedFec() OVERRIDE;
private:
- int id_;
scoped_ptr<CriticalSectionWrapper> crit_sect_;
RtpData* recovered_packet_callback_;
ForwardErrorCorrection* fec_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
index 2e8846c3b29..0b1244941c3 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
@@ -39,8 +39,8 @@ class MockRtpData : public RtpData {
class ReceiverFecTest : public ::testing::Test {
protected:
virtual void SetUp() {
- fec_ = new ForwardErrorCorrection(0);
- receiver_fec_ = FecReceiver::Create(0, &rtp_data_callback_);
+ fec_ = new ForwardErrorCorrection();
+ receiver_fec_ = FecReceiver::Create(&rtp_data_callback_);
generator_ = new FrameGenerator();
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_test_helper.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
index e3c3581be73..e6426ea7eea 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/fec_test_helper.h
@@ -17,9 +17,6 @@
namespace webrtc {
enum {
- kRtpHeaderSize = 12
-};
-enum {
kFecPayloadType = 96
};
enum {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
index 189e1b052f9..31303c8ad19 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -17,15 +17,13 @@
#include <algorithm>
#include <iterator>
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
-// Minimum RTP header size in bytes.
-const uint8_t kRtpHeaderSize = 12;
-
// FEC header size in bytes.
const uint8_t kFecHeaderSize = 10;
@@ -84,9 +82,8 @@ ForwardErrorCorrection::ReceivedPacket::~ReceivedPacket() {}
ForwardErrorCorrection::RecoveredPacket::RecoveredPacket() {}
ForwardErrorCorrection::RecoveredPacket::~RecoveredPacket() {}
-ForwardErrorCorrection::ForwardErrorCorrection(int32_t id)
- : id_(id),
- generated_fec_packets_(kMaxMediaPackets),
+ForwardErrorCorrection::ForwardErrorCorrection()
+ : generated_fec_packets_(kMaxMediaPackets),
fec_packet_received_(false) {}
ForwardErrorCorrection::~ForwardErrorCorrection() {}
@@ -114,43 +111,23 @@ int32_t ForwardErrorCorrection::GenerateFEC(const PacketList& media_packet_list,
bool use_unequal_protection,
FecMaskType fec_mask_type,
PacketList* fec_packet_list) {
- if (media_packet_list.empty()) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s media packet list is empty", __FUNCTION__);
- return -1;
- }
- if (!fec_packet_list->empty()) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s FEC packet list is not empty", __FUNCTION__);
- return -1;
- }
const uint16_t num_media_packets = media_packet_list.size();
- bool l_bit = (num_media_packets > 8 * kMaskSizeLBitClear);
- int num_maskBytes = l_bit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+
+ // Sanity check arguments.
+ assert(num_media_packets > 0);
+ assert(num_important_packets >= 0 &&
+ num_important_packets <= num_media_packets);
+ assert(fec_packet_list->empty());
if (num_media_packets > kMaxMediaPackets) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s can only protect %d media packets per frame; %d requested",
- __FUNCTION__, kMaxMediaPackets, num_media_packets);
+ LOG(LS_WARNING) << "Can't protect " << num_media_packets
+ << " media packets per frame. Max is " << kMaxMediaPackets;
return -1;
}
- // Error checking on the number of important packets.
- // Can't have more important packets than media packets.
- if (num_important_packets > num_media_packets) {
- WEBRTC_TRACE(
- kTraceError, kTraceRtpRtcp, id_,
- "Number of important packets (%d) greater than number of media "
- "packets (%d)",
- num_important_packets, num_media_packets);
- return -1;
- }
- if (num_important_packets < 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "Number of important packets (%d) less than zero",
- num_important_packets);
- return -1;
- }
+ bool l_bit = (num_media_packets > 8 * kMaskSizeLBitClear);
+ int num_maskBytes = l_bit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+
// Do some error checking on the media packets.
PacketList::const_iterator media_list_it = media_packet_list.begin();
while (media_list_it != media_packet_list.end()) {
@@ -158,20 +135,16 @@ int32_t ForwardErrorCorrection::GenerateFEC(const PacketList& media_packet_list,
assert(media_packet);
if (media_packet->length < kRtpHeaderSize) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s media packet (%d bytes) is smaller than RTP header",
- __FUNCTION__, media_packet->length);
+ LOG(LS_WARNING) << "Media packet " << media_packet->length << " bytes "
+ << "is smaller than RTP header.";
return -1;
}
// Ensure our FEC packets will fit in a typical MTU.
if (media_packet->length + PacketOverhead() + kTransportOverhead >
IP_PACKET_SIZE) {
- WEBRTC_TRACE(
- kTraceError, kTraceRtpRtcp, id_,
- "%s media packet (%d bytes) with overhead is larger than MTU(%d)",
- __FUNCTION__, media_packet->length, IP_PACKET_SIZE);
- return -1;
+ LOG(LS_WARNING) << "Media packet " << media_packet->length << " bytes "
+ << "with overhead is larger than " << IP_PACKET_SIZE;
}
media_list_it++;
}
@@ -584,9 +557,7 @@ void ForwardErrorCorrection::InsertFECPacket(
}
if (fec_packet->protected_pkt_list.empty()) {
// All-zero packet mask; we can discard this FEC packet.
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "FEC packet %u has an all-zero packet mask.",
- fec_packet->seq_num, __FUNCTION__);
+ LOG(LS_WARNING) << "FEC packet has an all-zero packet mask.";
delete fec_packet;
} else {
AssignRecoveredPackets(fec_packet, recovered_packet_list);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.h
index 8910fe477d3..bb790f356f2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction.h
@@ -117,8 +117,7 @@ class ForwardErrorCorrection {
typedef std::list<ReceivedPacket*> ReceivedPacketList;
typedef std::list<RecoveredPacket*> RecoveredPacketList;
- // \param[in] id Module ID
- ForwardErrorCorrection(int32_t id);
+ ForwardErrorCorrection();
virtual ~ForwardErrorCorrection();
@@ -304,7 +303,6 @@ class ForwardErrorCorrection {
static void DiscardOldPackets(RecoveredPacketList* recovered_packet_list);
static uint16_t ParseSequenceNumber(uint8_t* packet);
- int32_t id_;
std::vector<Packet> generated_fec_packets_;
FecPacketList fec_packet_list_;
bool fec_packet_received_;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h
index ddac3cd113a..f82e46d5723 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/forward_error_correction_internal.h
@@ -8,6 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_INTERNAL_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_INTERNAL_H_
+
#include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
#include "webrtc/typedefs.h"
@@ -64,3 +67,4 @@ void GeneratePacketMasks(int num_media_packets, int num_fec_packets,
} // namespace internal
} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_INTERNAL_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
index 8c6cc5434cd..209af40bc43 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/nack_rtx_unittest.cc
@@ -164,7 +164,7 @@ class RtxLoopBackTransport : public webrtc::Transport {
class RtpRtcpRtxNackTest : public ::testing::Test {
protected:
RtpRtcpRtxNackTest()
- : rtp_payload_registry_(0, RTPPayloadStrategy::CreateStrategy(false)),
+ : rtp_payload_registry_(RTPPayloadStrategy::CreateStrategy(false)),
rtp_rtcp_module_(NULL),
transport_(kTestSsrc + 1),
receiver_(),
@@ -188,7 +188,7 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
kTestId, &fake_clock, &receiver_, rtp_feedback_.get(),
&rtp_payload_registry_));
- EXPECT_EQ(0, rtp_rtcp_module_->SetSSRC(kTestSsrc));
+ rtp_rtcp_module_->SetSSRC(kTestSsrc);
EXPECT_EQ(0, rtp_rtcp_module_->SetRTCPStatus(kRtcpCompound));
rtp_receiver_->SetNACKStatus(kNackRtcp);
EXPECT_EQ(0, rtp_rtcp_module_->SetStorePacketsStatus(true, 600));
@@ -253,9 +253,9 @@ class RtpRtcpRtxNackTest : public ::testing::Test {
}
void RunRtxTest(RtxMode rtx_method, int loss) {
- rtp_payload_registry_.SetRtxStatus(true, kTestSsrc + 1);
- EXPECT_EQ(0, rtp_rtcp_module_->SetRTXSendStatus(rtx_method, true,
- kTestSsrc + 1));
+ rtp_payload_registry_.SetRtxSsrc(kTestSsrc + 1);
+ rtp_rtcp_module_->SetRTXSendStatus(rtx_method);
+ rtp_rtcp_module_->SetRtxSsrc(kTestSsrc + 1);
transport_.DropEveryNthPacket(loss);
uint32_t timestamp = 3000;
uint16_t nack_list[kVideoNackListSize];
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
index ada7d70266b..baa3827949c 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/producer_fec_unittest.cc
@@ -39,7 +39,7 @@ void VerifyHeader(uint16_t seq_num,
class ProducerFecTest : public ::testing::Test {
protected:
virtual void SetUp() {
- fec_ = new ForwardErrorCorrection(0);
+ fec_ = new ForwardErrorCorrection();
producer_ = new ProducerFec(fec_);
generator_ = new FrameGenerator;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
index 3ed44b83701..aa7c9c57156 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc
@@ -24,14 +24,16 @@ const int kStatisticsProcessIntervalMs = 1000;
StreamStatistician::~StreamStatistician() {}
-StreamStatisticianImpl::StreamStatisticianImpl(Clock* clock)
+StreamStatisticianImpl::StreamStatisticianImpl(
+ Clock* clock,
+ RtcpStatisticsCallback* rtcp_callback,
+ StreamDataCountersCallback* rtp_callback)
: clock_(clock),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+ stream_lock_(CriticalSectionWrapper::CreateCriticalSection()),
incoming_bitrate_(clock, NULL),
ssrc_(0),
max_reordering_threshold_(kDefaultMaxReorderingThreshold),
jitter_q4_(0),
- jitter_max_q4_(0),
cumulative_loss_(0),
jitter_q4_transmission_time_offset_(0),
last_receive_time_ms_(0),
@@ -42,53 +44,53 @@ StreamStatisticianImpl::StreamStatisticianImpl(Clock* clock)
received_seq_first_(0),
received_seq_max_(0),
received_seq_wraps_(0),
- first_packet_(true),
received_packet_overhead_(12),
- received_byte_count_(0),
- received_retransmitted_packets_(0),
- received_inorder_packet_count_(0),
last_report_inorder_packets_(0),
last_report_old_packets_(0),
last_report_seq_max_(0),
- last_reported_statistics_() {}
+ rtcp_callback_(rtcp_callback),
+ rtp_callback_(rtp_callback) {}
void StreamStatisticianImpl::ResetStatistics() {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
last_report_inorder_packets_ = 0;
last_report_old_packets_ = 0;
last_report_seq_max_ = 0;
- memset(&last_reported_statistics_, 0, sizeof(last_reported_statistics_));
+ last_reported_statistics_ = RtcpStatistics();
jitter_q4_ = 0;
- jitter_max_q4_ = 0;
cumulative_loss_ = 0;
jitter_q4_transmission_time_offset_ = 0;
received_seq_wraps_ = 0;
received_seq_max_ = 0;
received_seq_first_ = 0;
- received_byte_count_ = 0;
- received_retransmitted_packets_ = 0;
- received_inorder_packet_count_ = 0;
- first_packet_ = true;
+ receive_counters_ = StreamDataCounters();
}
void StreamStatisticianImpl::IncomingPacket(const RTPHeader& header,
size_t bytes,
bool retransmitted) {
- CriticalSectionScoped cs(crit_sect_.get());
+ UpdateCounters(header, bytes, retransmitted);
+ NotifyRtpCallback();
+}
+
+void StreamStatisticianImpl::UpdateCounters(const RTPHeader& header,
+ size_t bytes,
+ bool retransmitted) {
+ CriticalSectionScoped cs(stream_lock_.get());
bool in_order = InOrderPacketInternal(header.sequenceNumber);
ssrc_ = header.ssrc;
incoming_bitrate_.Update(bytes);
- received_byte_count_ += bytes;
+ receive_counters_.bytes +=
+ bytes - (header.paddingLength + header.headerLength);
+ receive_counters_.header_bytes += header.headerLength;
+ receive_counters_.padding_bytes += header.paddingLength;
+ ++receive_counters_.packets;
+ if (!in_order && retransmitted) {
+ ++receive_counters_.retransmitted_packets;
+ }
- if (first_packet_) {
- first_packet_ = false;
- // This is the first received report.
+ if (receive_counters_.packets == 1) {
received_seq_first_ = header.sequenceNumber;
- received_seq_max_ = header.sequenceNumber;
- received_inorder_packet_count_ = 1;
- clock_->CurrentNtp(last_receive_time_secs_, last_receive_time_frac_);
- last_receive_time_ms_ = clock_->TimeInMilliseconds();
- return;
}
// Count only the new packets received. That is, if packets 1, 2, 3, 5, 4, 6
@@ -98,66 +100,27 @@ void StreamStatisticianImpl::IncomingPacket(const RTPHeader& header,
uint32_t receive_time_secs;
uint32_t receive_time_frac;
clock_->CurrentNtp(receive_time_secs, receive_time_frac);
- received_inorder_packet_count_++;
// Wrong if we use RetransmitOfOldPacket.
- int32_t seq_diff = header.sequenceNumber - received_seq_max_;
- if (seq_diff < 0) {
+ if (receive_counters_.packets > 1 &&
+ received_seq_max_ > header.sequenceNumber) {
// Wrap around detected.
received_seq_wraps_++;
}
// New max.
received_seq_max_ = header.sequenceNumber;
+ // If new time stamp and more than one in-order packet received, calculate
+ // new jitter statistics.
if (header.timestamp != last_received_timestamp_ &&
- received_inorder_packet_count_ > 1) {
- uint32_t receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP(
- receive_time_secs, receive_time_frac, header.payload_type_frequency);
- uint32_t last_receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP(
- last_receive_time_secs_, last_receive_time_frac_,
- header.payload_type_frequency);
- int32_t time_diff_samples = (receive_time_rtp - last_receive_time_rtp) -
- (header.timestamp - last_received_timestamp_);
-
- time_diff_samples = abs(time_diff_samples);
-
- // lib_jingle sometimes deliver crazy jumps in TS for the same stream.
- // If this happens, don't update jitter value. Use 5 secs video frequency
- // as the threshold.
- if (time_diff_samples < 450000) {
- // Note we calculate in Q4 to avoid using float.
- int32_t jitter_diff_q4 = (time_diff_samples << 4) - jitter_q4_;
- jitter_q4_ += ((jitter_diff_q4 + 8) >> 4);
- }
-
- // Extended jitter report, RFC 5450.
- // Actual network jitter, excluding the source-introduced jitter.
- int32_t time_diff_samples_ext =
- (receive_time_rtp - last_receive_time_rtp) -
- ((header.timestamp +
- header.extension.transmissionTimeOffset) -
- (last_received_timestamp_ +
- last_received_transmission_time_offset_));
-
- time_diff_samples_ext = abs(time_diff_samples_ext);
-
- if (time_diff_samples_ext < 450000) {
- int32_t jitter_diffQ4TransmissionTimeOffset =
- (time_diff_samples_ext << 4) - jitter_q4_transmission_time_offset_;
- jitter_q4_transmission_time_offset_ +=
- ((jitter_diffQ4TransmissionTimeOffset + 8) >> 4);
- }
+ (receive_counters_.packets - receive_counters_.retransmitted_packets) >
+ 1) {
+ UpdateJitter(header, receive_time_secs, receive_time_frac);
}
last_received_timestamp_ = header.timestamp;
last_receive_time_secs_ = receive_time_secs;
last_receive_time_frac_ = receive_time_frac;
last_receive_time_ms_ = clock_->TimeInMilliseconds();
- } else {
- if (retransmitted) {
- received_retransmitted_packets_++;
- } else {
- received_inorder_packet_count_++;
- }
}
uint16_t packet_oh = header.headerLength + header.paddingLength;
@@ -167,29 +130,113 @@ void StreamStatisticianImpl::IncomingPacket(const RTPHeader& header,
received_packet_overhead_ = (15 * received_packet_overhead_ + packet_oh) >> 4;
}
+void StreamStatisticianImpl::UpdateJitter(const RTPHeader& header,
+ uint32_t receive_time_secs,
+ uint32_t receive_time_frac) {
+ uint32_t receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP(
+ receive_time_secs, receive_time_frac, header.payload_type_frequency);
+ uint32_t last_receive_time_rtp = ModuleRTPUtility::ConvertNTPTimeToRTP(
+ last_receive_time_secs_, last_receive_time_frac_,
+ header.payload_type_frequency);
+ int32_t time_diff_samples = (receive_time_rtp - last_receive_time_rtp) -
+ (header.timestamp - last_received_timestamp_);
+
+ time_diff_samples = abs(time_diff_samples);
+
+ // lib_jingle sometimes deliver crazy jumps in TS for the same stream.
+ // If this happens, don't update jitter value. Use 5 secs video frequency
+ // as the threshold.
+ if (time_diff_samples < 450000) {
+ // Note we calculate in Q4 to avoid using float.
+ int32_t jitter_diff_q4 = (time_diff_samples << 4) - jitter_q4_;
+ jitter_q4_ += ((jitter_diff_q4 + 8) >> 4);
+ }
+
+ // Extended jitter report, RFC 5450.
+ // Actual network jitter, excluding the source-introduced jitter.
+ int32_t time_diff_samples_ext =
+ (receive_time_rtp - last_receive_time_rtp) -
+ ((header.timestamp +
+ header.extension.transmissionTimeOffset) -
+ (last_received_timestamp_ +
+ last_received_transmission_time_offset_));
+
+ time_diff_samples_ext = abs(time_diff_samples_ext);
+
+ if (time_diff_samples_ext < 450000) {
+ int32_t jitter_diffQ4TransmissionTimeOffset =
+ (time_diff_samples_ext << 4) - jitter_q4_transmission_time_offset_;
+ jitter_q4_transmission_time_offset_ +=
+ ((jitter_diffQ4TransmissionTimeOffset + 8) >> 4);
+ }
+}
+
+void StreamStatisticianImpl::NotifyRtpCallback() {
+ StreamDataCounters data;
+ uint32_t ssrc;
+ {
+ CriticalSectionScoped cs(stream_lock_.get());
+ data = receive_counters_;
+ ssrc = ssrc_;
+ }
+ rtp_callback_->DataCountersUpdated(data, ssrc);
+}
+
+void StreamStatisticianImpl::NotifyRtcpCallback() {
+ RtcpStatistics data;
+ uint32_t ssrc;
+ {
+ CriticalSectionScoped cs(stream_lock_.get());
+ data = last_reported_statistics_;
+ ssrc = ssrc_;
+ }
+ rtcp_callback_->StatisticsUpdated(data, ssrc);
+}
+
+void StreamStatisticianImpl::FecPacketReceived() {
+ {
+ CriticalSectionScoped cs(stream_lock_.get());
+ ++receive_counters_.fec_packets;
+ }
+ NotifyRtpCallback();
+}
+
void StreamStatisticianImpl::SetMaxReorderingThreshold(
int max_reordering_threshold) {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
max_reordering_threshold_ = max_reordering_threshold;
}
-bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) {
- CriticalSectionScoped cs(crit_sect_.get());
- if (received_seq_first_ == 0 && received_byte_count_ == 0) {
- // We have not received anything.
- return false;
- }
-
- if (!reset) {
- if (last_report_inorder_packets_ == 0) {
- // No report.
+bool StreamStatisticianImpl::GetStatistics(RtcpStatistics* statistics,
+ bool reset) {
+ {
+ CriticalSectionScoped cs(stream_lock_.get());
+ if (received_seq_first_ == 0 && receive_counters_.bytes == 0) {
+ // We have not received anything.
return false;
}
- // Just get last report.
- *statistics = last_reported_statistics_;
- return true;
+
+ if (!reset) {
+ if (last_report_inorder_packets_ == 0) {
+ // No report.
+ return false;
+ }
+ // Just get last report.
+ *statistics = last_reported_statistics_;
+ return true;
+ }
+
+ *statistics = CalculateRtcpStatistics();
}
+ NotifyRtcpCallback();
+
+ return true;
+}
+
+RtcpStatistics StreamStatisticianImpl::CalculateRtcpStatistics() {
+ RtcpStatistics stats;
+
if (last_report_inorder_packets_ == 0) {
// First time we send a report.
last_report_seq_max_ = received_seq_first_ - 1;
@@ -206,7 +253,8 @@ bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) {
// Number of received RTP packets since last report, counts all packets but
// not re-transmissions.
uint32_t rec_since_last =
- received_inorder_packet_count_ - last_report_inorder_packets_;
+ (receive_counters_.packets - receive_counters_.retransmitted_packets) -
+ last_report_inorder_packets_;
// With NACK we don't know the expected retransmissions during the last
// second. We know how many "old" packets we have received. We just count
@@ -218,7 +266,7 @@ bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) {
// re-transmitted. We use RTT to decide if a packet is re-ordered or
// re-transmitted.
uint32_t retransmitted_packets =
- received_retransmitted_packets_ - last_report_old_packets_;
+ receive_counters_.retransmitted_packets - last_report_old_packets_;
rec_since_last += retransmitted_packets;
int32_t missing = 0;
@@ -231,64 +279,60 @@ bool StreamStatisticianImpl::GetStatistics(Statistics* statistics, bool reset) {
local_fraction_lost =
static_cast<uint8_t>(255 * missing / exp_since_last);
}
- statistics->fraction_lost = local_fraction_lost;
+ stats.fraction_lost = local_fraction_lost;
// We need a counter for cumulative loss too.
cumulative_loss_ += missing;
-
- if (jitter_q4_ > jitter_max_q4_) {
- jitter_max_q4_ = jitter_q4_;
- }
- statistics->cumulative_lost = cumulative_loss_;
- statistics->extended_max_sequence_number = (received_seq_wraps_ << 16) +
- received_seq_max_;
+ stats.cumulative_lost = cumulative_loss_;
+ stats.extended_max_sequence_number =
+ (received_seq_wraps_ << 16) + received_seq_max_;
// Note: internal jitter value is in Q4 and needs to be scaled by 1/16.
- statistics->jitter = jitter_q4_ >> 4;
- statistics->max_jitter = jitter_max_q4_ >> 4;
- if (reset) {
- // Store this report.
- last_reported_statistics_ = *statistics;
-
- // Only for report blocks in RTCP SR and RR.
- last_report_inorder_packets_ = received_inorder_packet_count_;
- last_report_old_packets_ = received_retransmitted_packets_;
- last_report_seq_max_ = received_seq_max_;
- }
- return true;
+ stats.jitter = jitter_q4_ >> 4;
+
+ // Store this report.
+ last_reported_statistics_ = stats;
+
+ // Only for report blocks in RTCP SR and RR.
+ last_report_inorder_packets_ =
+ receive_counters_.packets - receive_counters_.retransmitted_packets;
+ last_report_old_packets_ = receive_counters_.retransmitted_packets;
+ last_report_seq_max_ = received_seq_max_;
+
+ return stats;
}
void StreamStatisticianImpl::GetDataCounters(
uint32_t* bytes_received, uint32_t* packets_received) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
if (bytes_received) {
- *bytes_received = received_byte_count_;
+ *bytes_received = receive_counters_.bytes + receive_counters_.header_bytes +
+ receive_counters_.padding_bytes;
}
if (packets_received) {
- *packets_received =
- received_retransmitted_packets_ + received_inorder_packet_count_;
+ *packets_received = receive_counters_.packets;
}
}
uint32_t StreamStatisticianImpl::BitrateReceived() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
return incoming_bitrate_.BitrateNow();
}
void StreamStatisticianImpl::ProcessBitrate() {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
incoming_bitrate_.Process();
}
void StreamStatisticianImpl::LastReceiveTimeNtp(uint32_t* secs,
uint32_t* frac) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
*secs = last_receive_time_secs_;
*frac = last_receive_time_frac_;
}
bool StreamStatisticianImpl::IsRetransmitOfOldPacket(
const RTPHeader& header, int min_rtt) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
if (InOrderPacketInternal(header.sequenceNumber)) {
return false;
}
@@ -323,7 +367,7 @@ bool StreamStatisticianImpl::IsRetransmitOfOldPacket(
}
bool StreamStatisticianImpl::IsPacketInOrder(uint16_t sequence_number) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(stream_lock_.get());
return InOrderPacketInternal(sequence_number);
}
@@ -348,8 +392,10 @@ ReceiveStatistics* ReceiveStatistics::Create(Clock* clock) {
ReceiveStatisticsImpl::ReceiveStatisticsImpl(Clock* clock)
: clock_(clock),
- crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- last_rate_update_ms_(0) {}
+ receive_statistics_lock_(CriticalSectionWrapper::CreateCriticalSection()),
+ last_rate_update_ms_(0),
+ rtcp_stats_callback_(NULL),
+ rtp_stats_callback_(NULL) {}
ReceiveStatisticsImpl::~ReceiveStatisticsImpl() {
while (!statisticians_.empty()) {
@@ -359,20 +405,31 @@ ReceiveStatisticsImpl::~ReceiveStatisticsImpl() {
}
void ReceiveStatisticsImpl::IncomingPacket(const RTPHeader& header,
- size_t bytes, bool old_packet) {
- CriticalSectionScoped cs(crit_sect_.get());
- StatisticianImplMap::iterator it = statisticians_.find(header.ssrc);
- if (it == statisticians_.end()) {
- std::pair<StatisticianImplMap::iterator, uint32_t> insert_result =
- statisticians_.insert(std::make_pair(
- header.ssrc, new StreamStatisticianImpl(clock_)));
- it = insert_result.first;
+ size_t bytes,
+ bool retransmitted) {
+ StatisticianImplMap::iterator it;
+ {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ it = statisticians_.find(header.ssrc);
+ if (it == statisticians_.end()) {
+ std::pair<StatisticianImplMap::iterator, uint32_t> insert_result =
+ statisticians_.insert(std::make_pair(
+ header.ssrc, new StreamStatisticianImpl(clock_, this, this)));
+ it = insert_result.first;
+ }
}
- statisticians_[header.ssrc]->IncomingPacket(header, bytes, old_packet);
+ it->second->IncomingPacket(header, bytes, retransmitted);
+}
+
+void ReceiveStatisticsImpl::FecPacketReceived(uint32_t ssrc) {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ StatisticianImplMap::iterator it = statisticians_.find(ssrc);
+ assert(it != statisticians_.end());
+ it->second->FecPacketReceived();
}
void ReceiveStatisticsImpl::ChangeSsrc(uint32_t from_ssrc, uint32_t to_ssrc) {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
StatisticianImplMap::iterator from_it = statisticians_.find(from_ssrc);
if (from_it == statisticians_.end())
return;
@@ -383,7 +440,7 @@ void ReceiveStatisticsImpl::ChangeSsrc(uint32_t from_ssrc, uint32_t to_ssrc) {
}
StatisticianMap ReceiveStatisticsImpl::GetActiveStatisticians() const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
StatisticianMap active_statisticians;
for (StatisticianImplMap::const_iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
@@ -400,7 +457,7 @@ StatisticianMap ReceiveStatisticsImpl::GetActiveStatisticians() const {
StreamStatistician* ReceiveStatisticsImpl::GetStatistician(
uint32_t ssrc) const {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
StatisticianImplMap::const_iterator it = statisticians_.find(ssrc);
if (it == statisticians_.end())
return NULL;
@@ -409,7 +466,7 @@ StreamStatistician* ReceiveStatisticsImpl::GetStatistician(
void ReceiveStatisticsImpl::SetMaxReorderingThreshold(
int max_reordering_threshold) {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
for (StatisticianImplMap::iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
it->second->SetMaxReorderingThreshold(max_reordering_threshold);
@@ -417,7 +474,7 @@ void ReceiveStatisticsImpl::SetMaxReorderingThreshold(
}
int32_t ReceiveStatisticsImpl::Process() {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
for (StatisticianImplMap::iterator it = statisticians_.begin();
it != statisticians_.end(); ++it) {
it->second->ProcessBitrate();
@@ -427,17 +484,50 @@ int32_t ReceiveStatisticsImpl::Process() {
}
int32_t ReceiveStatisticsImpl::TimeUntilNextProcess() {
- CriticalSectionScoped cs(crit_sect_.get());
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
int time_since_last_update = clock_->TimeInMilliseconds() -
last_rate_update_ms_;
return std::max(kStatisticsProcessIntervalMs - time_since_last_update, 0);
}
+void ReceiveStatisticsImpl::RegisterRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ if (callback != NULL)
+ assert(rtcp_stats_callback_ == NULL);
+ rtcp_stats_callback_ = callback;
+}
+
+void ReceiveStatisticsImpl::StatisticsUpdated(const RtcpStatistics& statistics,
+ uint32_t ssrc) {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ if (rtcp_stats_callback_) {
+ rtcp_stats_callback_->StatisticsUpdated(statistics, ssrc);
+ }
+}
+
+void ReceiveStatisticsImpl::RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ if (callback != NULL)
+ assert(rtp_stats_callback_ == NULL);
+ rtp_stats_callback_ = callback;
+}
+
+void ReceiveStatisticsImpl::DataCountersUpdated(const StreamDataCounters& stats,
+ uint32_t ssrc) {
+ CriticalSectionScoped cs(receive_statistics_lock_.get());
+ if (rtp_stats_callback_) {
+ rtp_stats_callback_->DataCountersUpdated(stats, ssrc);
+ }
+}
void NullReceiveStatistics::IncomingPacket(const RTPHeader& rtp_header,
size_t bytes,
bool retransmitted) {}
+void NullReceiveStatistics::FecPacketReceived(uint32_t ssrc) {}
+
StatisticianMap NullReceiveStatistics::GetActiveStatisticians() const {
return StatisticianMap();
}
@@ -454,4 +544,10 @@ int32_t NullReceiveStatistics::TimeUntilNextProcess() { return 0; }
int32_t NullReceiveStatistics::Process() { return 0; }
+void NullReceiveStatistics::RegisterRtcpStatisticsCallback(
+ RtcpStatisticsCallback* callback) {}
+
+void NullReceiveStatistics::RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) {}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
index 0af074c4a13..4aa41f349e5 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.h
@@ -25,37 +25,48 @@ class CriticalSectionWrapper;
class StreamStatisticianImpl : public StreamStatistician {
public:
- explicit StreamStatisticianImpl(Clock* clock);
-
+ StreamStatisticianImpl(Clock* clock,
+ RtcpStatisticsCallback* rtcp_callback,
+ StreamDataCountersCallback* rtp_callback);
virtual ~StreamStatisticianImpl() {}
- virtual bool GetStatistics(Statistics* statistics, bool reset) OVERRIDE;
+ virtual bool GetStatistics(RtcpStatistics* statistics, bool reset) OVERRIDE;
virtual void GetDataCounters(uint32_t* bytes_received,
uint32_t* packets_received) const OVERRIDE;
virtual uint32_t BitrateReceived() const OVERRIDE;
virtual void ResetStatistics() OVERRIDE;
virtual bool IsRetransmitOfOldPacket(const RTPHeader& header,
- int min_rtt) const OVERRIDE;
+ int min_rtt) const OVERRIDE;
virtual bool IsPacketInOrder(uint16_t sequence_number) const OVERRIDE;
- void IncomingPacket(const RTPHeader& rtp_header, size_t bytes,
+ void IncomingPacket(const RTPHeader& rtp_header,
+ size_t bytes,
bool retransmitted);
+ void FecPacketReceived();
void SetMaxReorderingThreshold(int max_reordering_threshold);
void ProcessBitrate();
virtual void LastReceiveTimeNtp(uint32_t* secs, uint32_t* frac) const;
private:
bool InOrderPacketInternal(uint16_t sequence_number) const;
+ RtcpStatistics CalculateRtcpStatistics();
+ void UpdateJitter(const RTPHeader& header,
+ uint32_t receive_time_secs,
+ uint32_t receive_time_frac);
+ void UpdateCounters(const RTPHeader& rtp_header,
+ size_t bytes,
+ bool retransmitted);
+ void NotifyRtpCallback() LOCKS_EXCLUDED(stream_lock_.get());
+ void NotifyRtcpCallback() LOCKS_EXCLUDED(stream_lock_.get());
Clock* clock_;
- scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ scoped_ptr<CriticalSectionWrapper> stream_lock_;
Bitrate incoming_bitrate_;
uint32_t ssrc_;
int max_reordering_threshold_; // In number of packets or sequence numbers.
// Stats on received RTP packets.
uint32_t jitter_q4_;
- uint32_t jitter_max_q4_;
uint32_t cumulative_loss_;
uint32_t jitter_q4_transmission_time_offset_;
@@ -67,30 +78,34 @@ class StreamStatisticianImpl : public StreamStatistician {
uint16_t received_seq_first_;
uint16_t received_seq_max_;
uint16_t received_seq_wraps_;
- bool first_packet_;
// Current counter values.
uint16_t received_packet_overhead_;
- uint32_t received_byte_count_;
- uint32_t received_retransmitted_packets_;
- uint32_t received_inorder_packet_count_;
+ StreamDataCounters receive_counters_;
// Counter values when we sent the last report.
uint32_t last_report_inorder_packets_;
uint32_t last_report_old_packets_;
uint16_t last_report_seq_max_;
- Statistics last_reported_statistics_;
+ RtcpStatistics last_reported_statistics_;
+
+ RtcpStatisticsCallback* const rtcp_callback_;
+ StreamDataCountersCallback* const rtp_callback_;
};
-class ReceiveStatisticsImpl : public ReceiveStatistics {
+class ReceiveStatisticsImpl : public ReceiveStatistics,
+ public RtcpStatisticsCallback,
+ public StreamDataCountersCallback {
public:
explicit ReceiveStatisticsImpl(Clock* clock);
~ReceiveStatisticsImpl();
// Implement ReceiveStatistics.
- virtual void IncomingPacket(const RTPHeader& header, size_t bytes,
- bool old_packet) OVERRIDE;
+ virtual void IncomingPacket(const RTPHeader& header,
+ size_t bytes,
+ bool retransmitted) OVERRIDE;
+ virtual void FecPacketReceived(uint32_t ssrc) OVERRIDE;
virtual StatisticianMap GetActiveStatisticians() const OVERRIDE;
virtual StreamStatistician* GetStatistician(uint32_t ssrc) const OVERRIDE;
virtual void SetMaxReorderingThreshold(int max_reordering_threshold) OVERRIDE;
@@ -101,13 +116,27 @@ class ReceiveStatisticsImpl : public ReceiveStatistics {
void ChangeSsrc(uint32_t from_ssrc, uint32_t to_ssrc);
+ virtual void RegisterRtcpStatisticsCallback(RtcpStatisticsCallback* callback)
+ OVERRIDE;
+
+ virtual void RegisterRtpStatisticsCallback(
+ StreamDataCountersCallback* callback) OVERRIDE;
+
private:
+ virtual void StatisticsUpdated(const RtcpStatistics& statistics,
+ uint32_t ssrc) OVERRIDE;
+ virtual void DataCountersUpdated(const StreamDataCounters& counters,
+ uint32_t ssrc) OVERRIDE;
+
typedef std::map<uint32_t, StreamStatisticianImpl*> StatisticianImplMap;
Clock* clock_;
- scoped_ptr<CriticalSectionWrapper> crit_sect_;
+ scoped_ptr<CriticalSectionWrapper> receive_statistics_lock_;
int64_t last_rate_update_ms_;
StatisticianImplMap statisticians_;
+
+ RtcpStatisticsCallback* rtcp_stats_callback_;
+ StreamDataCountersCallback* rtp_stats_callback_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVE_STATISTICS_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
index be8f2fcdcc9..f0b9dedde57 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/receive_statistics_unittest.cc
@@ -28,10 +28,10 @@ class ReceiveStatisticsTest : public ::testing::Test {
receive_statistics_(ReceiveStatistics::Create(&clock_)) {
memset(&header1_, 0, sizeof(header1_));
header1_.ssrc = kSsrc1;
- header1_.sequenceNumber = 0;
+ header1_.sequenceNumber = 100;
memset(&header2_, 0, sizeof(header2_));
header2_.ssrc = kSsrc2;
- header2_.sequenceNumber = 0;
+ header2_.sequenceNumber = 100;
}
protected:
@@ -131,4 +131,173 @@ TEST_F(ReceiveStatisticsTest, ActiveStatisticians) {
EXPECT_EQ(200u, bytes_received);
EXPECT_EQ(2u, packets_received);
}
+
+TEST_F(ReceiveStatisticsTest, RtcpCallbacks) {
+ class TestCallback : public RtcpStatisticsCallback {
+ public:
+ TestCallback()
+ : RtcpStatisticsCallback(), num_calls_(0), ssrc_(0), stats_() {}
+ virtual ~TestCallback() {}
+
+ virtual void StatisticsUpdated(const RtcpStatistics& statistics,
+ uint32_t ssrc) {
+ ssrc_ = ssrc;
+ stats_ = statistics;
+ ++num_calls_;
+ }
+
+ uint32_t num_calls_;
+ uint32_t ssrc_;
+ RtcpStatistics stats_;
+ } callback;
+
+ receive_statistics_->RegisterRtcpStatisticsCallback(&callback);
+
+ // Add some arbitrary data, with loss and jitter.
+ header1_.sequenceNumber = 1;
+ clock_.AdvanceTimeMilliseconds(7);
+ header1_.timestamp += 3;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ header1_.sequenceNumber += 2;
+ clock_.AdvanceTimeMilliseconds(9);
+ header1_.timestamp += 9;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ --header1_.sequenceNumber;
+ clock_.AdvanceTimeMilliseconds(13);
+ header1_.timestamp += 47;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, true);
+ header1_.sequenceNumber += 3;
+ clock_.AdvanceTimeMilliseconds(11);
+ header1_.timestamp += 17;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ ++header1_.sequenceNumber;
+
+ EXPECT_EQ(0u, callback.num_calls_);
+
+ // Call GetStatistics, simulating a timed rtcp sender thread.
+ RtcpStatistics statistics;
+ receive_statistics_->GetStatistician(kSsrc1)
+ ->GetStatistics(&statistics, true);
+
+ EXPECT_EQ(1u, callback.num_calls_);
+ EXPECT_EQ(callback.ssrc_, kSsrc1);
+ EXPECT_EQ(statistics.cumulative_lost, callback.stats_.cumulative_lost);
+ EXPECT_EQ(statistics.extended_max_sequence_number,
+ callback.stats_.extended_max_sequence_number);
+ EXPECT_EQ(statistics.fraction_lost, callback.stats_.fraction_lost);
+ EXPECT_EQ(statistics.jitter, callback.stats_.jitter);
+ EXPECT_EQ(51, statistics.fraction_lost);
+ EXPECT_EQ(1u, statistics.cumulative_lost);
+ EXPECT_EQ(5u, statistics.extended_max_sequence_number);
+ EXPECT_EQ(4u, statistics.jitter);
+
+ receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
+
+ // Add some more data.
+ header1_.sequenceNumber = 1;
+ clock_.AdvanceTimeMilliseconds(7);
+ header1_.timestamp += 3;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ header1_.sequenceNumber += 2;
+ clock_.AdvanceTimeMilliseconds(9);
+ header1_.timestamp += 9;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ --header1_.sequenceNumber;
+ clock_.AdvanceTimeMilliseconds(13);
+ header1_.timestamp += 47;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, true);
+ header1_.sequenceNumber += 3;
+ clock_.AdvanceTimeMilliseconds(11);
+ header1_.timestamp += 17;
+ receive_statistics_->IncomingPacket(header1_, kPacketSize1, false);
+ ++header1_.sequenceNumber;
+
+ receive_statistics_->GetStatistician(kSsrc1)
+ ->GetStatistics(&statistics, true);
+
+ // Should not have been called after deregister.
+ EXPECT_EQ(1u, callback.num_calls_);
+}
+
+TEST_F(ReceiveStatisticsTest, RtpCallbacks) {
+ class TestCallback : public StreamDataCountersCallback {
+ public:
+ TestCallback()
+ : StreamDataCountersCallback(), num_calls_(0), ssrc_(0), stats_() {}
+ virtual ~TestCallback() {}
+
+ virtual void DataCountersUpdated(const StreamDataCounters& counters,
+ uint32_t ssrc) {
+ ssrc_ = ssrc;
+ stats_ = counters;
+ ++num_calls_;
+ }
+
+ void ExpectMatches(uint32_t num_calls,
+ uint32_t ssrc,
+ uint32_t bytes,
+ uint32_t padding,
+ uint32_t packets,
+ uint32_t retransmits,
+ uint32_t fec) {
+ EXPECT_EQ(num_calls, num_calls_);
+ EXPECT_EQ(ssrc, ssrc_);
+ EXPECT_EQ(bytes, stats_.bytes);
+ EXPECT_EQ(padding, stats_.padding_bytes);
+ EXPECT_EQ(packets, stats_.packets);
+ EXPECT_EQ(retransmits, stats_.retransmitted_packets);
+ EXPECT_EQ(fec, stats_.fec_packets);
+ }
+
+ uint32_t num_calls_;
+ uint32_t ssrc_;
+ StreamDataCounters stats_;
+ } callback;
+
+ receive_statistics_->RegisterRtpStatisticsCallback(&callback);
+
+ const uint32_t kHeaderLength = 20;
+ const uint32_t kPaddingLength = 9;
+
+ // One packet of size kPacketSize1.
+ header1_.headerLength = kHeaderLength;
+ receive_statistics_->IncomingPacket(
+ header1_, kPacketSize1 + kHeaderLength, false);
+ callback.ExpectMatches(1, kSsrc1, kPacketSize1, 0, 1, 0, 0);
+
+ ++header1_.sequenceNumber;
+ clock_.AdvanceTimeMilliseconds(5);
+ header1_.paddingLength = 9;
+ // Another packet of size kPacketSize1 with 9 bytes padding.
+ receive_statistics_->IncomingPacket(
+ header1_, kPacketSize1 + kHeaderLength + kPaddingLength, false);
+ callback.ExpectMatches(2, kSsrc1, 2 * kPacketSize1, kPaddingLength, 2, 0, 0);
+
+ clock_.AdvanceTimeMilliseconds(5);
+ // Retransmit last packet.
+ receive_statistics_->IncomingPacket(
+ header1_, kPacketSize1 + kHeaderLength + kPaddingLength, true);
+ callback.ExpectMatches(
+ 3, kSsrc1, 3 * kPacketSize1, kPaddingLength * 2, 3, 1, 0);
+
+ header1_.paddingLength = 0;
+ ++header1_.sequenceNumber;
+ clock_.AdvanceTimeMilliseconds(5);
+ // One recovered packet.
+ receive_statistics_->IncomingPacket(
+ header1_, kPacketSize1 + kHeaderLength, false);
+ receive_statistics_->FecPacketReceived(kSsrc1);
+ callback.ExpectMatches(
+ 5, kSsrc1, 4 * kPacketSize1, kPaddingLength * 2, 4, 1, 1);
+
+ receive_statistics_->RegisterRtpStatisticsCallback(NULL);
+
+ // New stats, but callback should not be called.
+ ++header1_.sequenceNumber;
+ clock_.AdvanceTimeMilliseconds(5);
+ receive_statistics_->IncomingPacket(
+ header1_, kPacketSize1 + kHeaderLength, true);
+ callback.ExpectMatches(
+ 5, kSsrc1, 4 * kPacketSize1, kPaddingLength * 2, 4, 1, 1);
+}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc
new file mode 100644
index 00000000000..0d71c26b63c
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator.cc
@@ -0,0 +1,85 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
+
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/timestamp_extrapolator.h"
+
+namespace webrtc {
+
+// TODO(wu): Refactor this class so that it can be shared with
+// vie_sync_module.cc.
+RemoteNtpTimeEstimator::RemoteNtpTimeEstimator(Clock* clock)
+ : clock_(clock),
+ ts_extrapolator_(
+ new TimestampExtrapolator(clock_->TimeInMilliseconds())) {
+}
+
+RemoteNtpTimeEstimator::~RemoteNtpTimeEstimator() {}
+
+bool RemoteNtpTimeEstimator::UpdateRtcpTimestamp(uint32_t ssrc,
+ RtpRtcp* rtp_rtcp) {
+ assert(rtp_rtcp);
+ uint16_t rtt = 0;
+ rtp_rtcp->RTT(ssrc, &rtt, NULL, NULL, NULL);
+ if (rtt == 0) {
+ // Waiting for valid rtt.
+ return true;
+ }
+ // Update the list of received RTCP SR reports.
+ uint32_t ntp_secs = 0;
+ uint32_t ntp_frac = 0;
+ uint32_t rtp_timestamp = 0;
+ if (0 != rtp_rtcp->RemoteNTP(&ntp_secs,
+ &ntp_frac,
+ NULL,
+ NULL,
+ &rtp_timestamp)) {
+ // Waiting for RTCP.
+ return true;
+ }
+ bool new_rtcp_sr = false;
+ if (!UpdateRtcpList(
+ ntp_secs, ntp_frac, rtp_timestamp, &rtcp_list_, &new_rtcp_sr)) {
+ return false;
+ }
+ if (!new_rtcp_sr) {
+ // No new RTCP SR since last time this function was called.
+ return true;
+ }
+ // Update extrapolator with the new arrival time.
+ // The extrapolator operates in the clock's TimeInMilliseconds() domain.
+ int64_t receiver_arrival_time_ms = clock_->TimeInMilliseconds();
+ int64_t sender_send_time_ms = Clock::NtpToMs(ntp_secs, ntp_frac);
+ int64_t sender_arrival_time_90k = (sender_send_time_ms + rtt / 2) * 90;
+ ts_extrapolator_->Update(receiver_arrival_time_ms, sender_arrival_time_90k);
+ return true;
+}
+
+int64_t RemoteNtpTimeEstimator::Estimate(uint32_t rtp_timestamp) {
+ if (rtcp_list_.size() < 2) {
+ // We need two RTCP SR reports to calculate NTP.
+ return -1;
+ }
+ int64_t sender_capture_ntp_ms = 0;
+ if (!RtpToNtpMs(rtp_timestamp, rtcp_list_, &sender_capture_ntp_ms)) {
+ return -1;
+ }
+ uint32_t timestamp = sender_capture_ntp_ms * 90;
+ int64_t receiver_capture_ms =
+ ts_extrapolator_->ExtrapolateLocalTime(timestamp);
+ int64_t ntp_offset =
+ clock_->CurrentNtpInMilliseconds() - clock_->TimeInMilliseconds();
+ return receiver_capture_ms + ntp_offset;
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc
new file mode 100644
index 00000000000..63cedf03afe
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/remote_ntp_time_estimator_unittest.cc
@@ -0,0 +1,112 @@
+/*
+* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+*
+* Use of this source code is governed by a BSD-style license
+* that can be found in the LICENSE file in the root of the source
+* tree. An additional intellectual property rights grant can be found
+* in the file PATENTS. All contributing project authors may
+* be found in the AUTHORS file in the root of the source tree.
+*/
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
+#include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Return;
+using ::testing::SetArgPointee;
+
+namespace webrtc {
+
+static const int kTestRtt = 10;
+static const int64_t kLocalClockInitialTimeMs = 123;
+static const int64_t kRemoteClockInitialTimeMs = 345;
+static const uint32_t kTimestampOffset = 567;
+static const int kTestSsrc = 789;
+
+class RemoteNtpTimeEstimatorTest : public ::testing::Test {
+ protected:
+ RemoteNtpTimeEstimatorTest()
+ : local_clock_(kLocalClockInitialTimeMs * 1000),
+ remote_clock_(kRemoteClockInitialTimeMs * 1000),
+ estimator_(&local_clock_) {}
+ ~RemoteNtpTimeEstimatorTest() {}
+
+ void AdvanceTimeMilliseconds(int64_t ms) {
+ local_clock_.AdvanceTimeMilliseconds(ms);
+ remote_clock_.AdvanceTimeMilliseconds(ms);
+ }
+
+ uint32_t GetRemoteTimestamp() {
+ return static_cast<uint32_t>(remote_clock_.TimeInMilliseconds()) * 90 +
+ kTimestampOffset;
+ }
+
+ void SendRtcpSr() {
+ uint32_t rtcp_timestamp = GetRemoteTimestamp();
+ uint32_t ntp_seconds;
+ uint32_t ntp_fractions;
+ remote_clock_.CurrentNtp(ntp_seconds, ntp_fractions);
+
+ AdvanceTimeMilliseconds(kTestRtt / 2);
+ ReceiveRtcpSr(rtcp_timestamp, ntp_seconds, ntp_fractions);
+ }
+
+ void UpdateRtcpTimestamp(MockRtpRtcp* rtp_rtcp, bool expected_result) {
+ if (rtp_rtcp) {
+ EXPECT_CALL(*rtp_rtcp, RTT(_, _, _, _, _))
+ .WillOnce(DoAll(SetArgPointee<1>(kTestRtt),
+ Return(0)));
+ }
+ EXPECT_EQ(expected_result,
+ estimator_.UpdateRtcpTimestamp(kTestSsrc, rtp_rtcp));
+ }
+
+ void ReceiveRtcpSr(uint32_t rtcp_timestamp,
+ uint32_t ntp_seconds,
+ uint32_t ntp_fractions) {
+ EXPECT_CALL(rtp_rtcp_, RemoteNTP(_, _, _, _, _))
+ .WillOnce(DoAll(SetArgPointee<0>(ntp_seconds),
+ SetArgPointee<1>(ntp_fractions),
+ SetArgPointee<4>(rtcp_timestamp),
+ Return(0)));
+
+ UpdateRtcpTimestamp(&rtp_rtcp_, true);
+ }
+
+ SimulatedClock local_clock_;
+ SimulatedClock remote_clock_;
+ MockRtpRtcp rtp_rtcp_;
+ RemoteNtpTimeEstimator estimator_;
+};
+
+TEST_F(RemoteNtpTimeEstimatorTest, Estimate) {
+ // Fails when no RTCP SR has been received, i.e. RemoteNTP() yields no valid NTP time.
+ EXPECT_CALL(rtp_rtcp_, RemoteNTP(_, _, _, _, _)).WillOnce(Return(0));
+ UpdateRtcpTimestamp(&rtp_rtcp_, false);
+
+ AdvanceTimeMilliseconds(1000);
+ // Remote peer sends first RTCP SR.
+ SendRtcpSr();
+
+ // Remote sends a RTP packet.
+ AdvanceTimeMilliseconds(15);
+ uint32_t rtp_timestamp = GetRemoteTimestamp();
+ int64_t capture_ntp_time_ms = local_clock_.CurrentNtpInMilliseconds();
+
+ // Local peer needs at least 2 RTCP SR to calculate the capture time.
+ const int64_t kNotEnoughRtcpSr = -1;
+ EXPECT_EQ(kNotEnoughRtcpSr, estimator_.Estimate(rtp_timestamp));
+
+ AdvanceTimeMilliseconds(800);
+ // Remote sends second RTCP SR.
+ SendRtcpSr();
+
+ // Local peer gets enough RTCP SR to calculate the capture time.
+ EXPECT_EQ(capture_ntp_time_ms, estimator_.Estimate(rtp_timestamp));
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
index 68f61371395..88463e4718f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
@@ -69,6 +69,7 @@ class RtcpFormatRembTest : public ::testing::Test {
RemoteBitrateEstimatorFactory().Create(
&remote_bitrate_observer_,
system_clock_,
+ kMimdControl,
kRemoteBitrateEstimatorMinBitrateBps)) {}
virtual void SetUp();
virtual void TearDown();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc
new file mode 100644
index 00000000000..a4cdfd95dee
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.cc
@@ -0,0 +1,695 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+
+#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
+#include "webrtc/system_wrappers/interface/logging.h"
+
+using webrtc::RTCPUtility::PT_APP;
+using webrtc::RTCPUtility::PT_BYE;
+using webrtc::RTCPUtility::PT_IJ;
+using webrtc::RTCPUtility::PT_PSFB;
+using webrtc::RTCPUtility::PT_RR;
+using webrtc::RTCPUtility::PT_RTPFB;
+using webrtc::RTCPUtility::PT_SDES;
+using webrtc::RTCPUtility::PT_SR;
+using webrtc::RTCPUtility::PT_XR;
+
+using webrtc::RTCPUtility::RTCPPacketAPP;
+using webrtc::RTCPUtility::RTCPPacketBYE;
+using webrtc::RTCPUtility::RTCPPacketPSFBAPP;
+using webrtc::RTCPUtility::RTCPPacketPSFBFIR;
+using webrtc::RTCPUtility::RTCPPacketPSFBFIRItem;
+using webrtc::RTCPUtility::RTCPPacketPSFBPLI;
+using webrtc::RTCPUtility::RTCPPacketPSFBREMBItem;
+using webrtc::RTCPUtility::RTCPPacketPSFBRPSI;
+using webrtc::RTCPUtility::RTCPPacketPSFBSLI;
+using webrtc::RTCPUtility::RTCPPacketPSFBSLIItem;
+using webrtc::RTCPUtility::RTCPPacketReportBlockItem;
+using webrtc::RTCPUtility::RTCPPacketRR;
+using webrtc::RTCPUtility::RTCPPacketRTPFBNACK;
+using webrtc::RTCPUtility::RTCPPacketRTPFBNACKItem;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBN;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBNItem;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBR;
+using webrtc::RTCPUtility::RTCPPacketRTPFBTMMBRItem;
+using webrtc::RTCPUtility::RTCPPacketSR;
+using webrtc::RTCPUtility::RTCPPacketXRDLRRReportBlockItem;
+using webrtc::RTCPUtility::RTCPPacketXRReceiverReferenceTimeItem;
+using webrtc::RTCPUtility::RTCPPacketXR;
+using webrtc::RTCPUtility::RTCPPacketXRVOIPMetricItem;
+
+namespace webrtc {
+namespace rtcp {
+namespace {
+// Unused SSRC of media source, set to 0.
+const uint32_t kUnusedMediaSourceSsrc0 = 0;
+
+void AssignUWord8(uint8_t* buffer, size_t* offset, uint8_t value) {
+ buffer[(*offset)++] = value;
+}
+void AssignUWord16(uint8_t* buffer, size_t* offset, uint16_t value) {
+ ModuleRTPUtility::AssignUWord16ToBuffer(buffer + *offset, value);
+ *offset += 2;
+}
+void AssignUWord24(uint8_t* buffer, size_t* offset, uint32_t value) {
+ ModuleRTPUtility::AssignUWord24ToBuffer(buffer + *offset, value);
+ *offset += 3;
+}
+void AssignUWord32(uint8_t* buffer, size_t* offset, uint32_t value) {
+ ModuleRTPUtility::AssignUWord32ToBuffer(buffer + *offset, value);
+ *offset += 4;
+}
+
+size_t BlockToHeaderLength(size_t length_in_bytes) {
+ // Length in 32-bit words minus 1.
+ assert(length_in_bytes > 0);
+ assert(length_in_bytes % 4 == 0);
+ return (length_in_bytes / 4) - 1;
+}
+
+// From RFC 3550, RTP: A Transport Protocol for Real-Time Applications.
+//
+// RTP header format.
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| RC/FMT | PT | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateHeader(uint8_t count_or_format, // Depends on packet type.
+ uint8_t packet_type,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ assert(length <= 0xffff);
+ const uint8_t kVersion = 2;
+ AssignUWord8(buffer, pos, (kVersion << 6) + count_or_format);
+ AssignUWord8(buffer, pos, packet_type);
+ AssignUWord16(buffer, pos, length);
+}
+
+// Sender report (SR) (RFC 3550).
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| RC | PT=SR=200 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of sender |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | NTP timestamp, most significant word |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | NTP timestamp, least significant word |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | RTP timestamp |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | sender's packet count |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | sender's octet count |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+
+void CreateSenderReport(const RTCPPacketSR& sr,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ CreateHeader(sr.NumberOfReportBlocks, PT_SR, length, buffer, pos);
+ AssignUWord32(buffer, pos, sr.SenderSSRC);
+ AssignUWord32(buffer, pos, sr.NTPMostSignificant);
+ AssignUWord32(buffer, pos, sr.NTPLeastSignificant);
+ AssignUWord32(buffer, pos, sr.RTPTimestamp);
+ AssignUWord32(buffer, pos, sr.SenderPacketCount);
+ AssignUWord32(buffer, pos, sr.SenderOctetCount);
+}
+
+// Receiver report (RR), header (RFC 3550).
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| RC | PT=RR=201 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of packet sender |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+
+void CreateReceiverReport(const RTCPPacketRR& rr,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ CreateHeader(rr.NumberOfReportBlocks, PT_RR, length, buffer, pos);
+ AssignUWord32(buffer, pos, rr.SenderSSRC);
+}
+
+// Report block (RFC 3550).
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | SSRC_1 (SSRC of first source) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | fraction lost | cumulative number of packets lost |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | extended highest sequence number received |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | interarrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | last SR (LSR) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | delay since last SR (DLSR) |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+
+void CreateReportBlocks(const std::vector<RTCPPacketReportBlockItem>& blocks,
+ uint8_t* buffer,
+ size_t* pos) {
+ for (std::vector<RTCPPacketReportBlockItem>::const_iterator
+ it = blocks.begin(); it != blocks.end(); ++it) {
+ AssignUWord32(buffer, pos, (*it).SSRC);
+ AssignUWord8(buffer, pos, (*it).FractionLost);
+ AssignUWord24(buffer, pos, (*it).CumulativeNumOfPacketsLost);
+ AssignUWord32(buffer, pos, (*it).ExtendedHighestSequenceNumber);
+ AssignUWord32(buffer, pos, (*it).Jitter);
+ AssignUWord32(buffer, pos, (*it).LastSR);
+ AssignUWord32(buffer, pos, (*it).DelayLastSR);
+ }
+}
+
+// Transmission Time Offsets in RTP Streams (RFC 5450).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// hdr |V=2|P| RC | PT=IJ=195 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | inter-arrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// . .
+// . .
+// . .
+// | inter-arrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateIj(const std::vector<uint32_t>& ij_items,
+ uint8_t* buffer,
+ size_t* pos) {
+ size_t length = ij_items.size();
+ CreateHeader(length, PT_IJ, length, buffer, pos);
+ for (std::vector<uint32_t>::const_iterator it = ij_items.begin();
+ it != ij_items.end(); ++it) {
+ AssignUWord32(buffer, pos, *it);
+ }
+}
+
+// Source Description (SDES) (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// header |V=2|P| SC | PT=SDES=202 | length |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// chunk | SSRC/CSRC_1 |
+// 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SDES items |
+// | ... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// chunk | SSRC/CSRC_2 |
+// 2 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SDES items |
+// | ... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+//
+// Canonical End-Point Identifier SDES Item (CNAME)
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | CNAME=1 | length | user and domain name ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateSdes(const std::vector<Sdes::Chunk>& chunks,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ CreateHeader(chunks.size(), PT_SDES, length, buffer, pos);
+ const uint8_t kSdesItemType = 1;
+ for (std::vector<Sdes::Chunk>::const_iterator it = chunks.begin();
+ it != chunks.end(); ++it) {
+ AssignUWord32(buffer, pos, (*it).ssrc);
+ AssignUWord8(buffer, pos, kSdesItemType);
+ AssignUWord8(buffer, pos, (*it).name.length());
+ memcpy(buffer + *pos, (*it).name.data(), (*it).name.length());
+ *pos += (*it).name.length();
+ memset(buffer + *pos, 0, (*it).null_octets);
+ *pos += (*it).null_octets;
+ }
+}
+
+// Bye packet (BYE) (RFC 3550).
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| SC | PT=BYE=203 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC/CSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : ... :
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// (opt) | length | reason for leaving ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateBye(const RTCPPacketBYE& bye,
+ const std::vector<uint32_t>& csrcs,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ CreateHeader(length, PT_BYE, length, buffer, pos);
+ AssignUWord32(buffer, pos, bye.SenderSSRC);
+ for (std::vector<uint32_t>::const_iterator it = csrcs.begin();
+ it != csrcs.end(); ++it) {
+ AssignUWord32(buffer, pos, *it);
+ }
+}
+
+// Application-Defined packet (APP) (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| subtype | PT=APP=204 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC/CSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | name (ASCII) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | application-dependent data ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateApp(const RTCPPacketAPP& app,
+ uint32_t ssrc,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ CreateHeader(app.SubType, PT_APP, length, buffer, pos);
+ AssignUWord32(buffer, pos, ssrc);
+ AssignUWord32(buffer, pos, app.Name);
+ memcpy(buffer + *pos, app.Data, app.Size);
+ *pos += app.Size;
+}
+
+// RFC 4585: Feedback format.
+//
+// Common packet format:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| FMT | PT | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of packet sender |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of media source |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : Feedback Control Information (FCI) :
+// :
+//
+
+// Picture loss indication (PLI) (RFC 4585).
+//
+// FCI: no feedback control information.
+
+void CreatePli(const RTCPPacketPSFBPLI& pli,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ const uint8_t kFmt = 1;
+ CreateHeader(kFmt, PT_PSFB, length, buffer, pos);
+ AssignUWord32(buffer, pos, pli.SenderSSRC);
+ AssignUWord32(buffer, pos, pli.MediaSSRC);
+}
+
+// Slice loss indication (SLI) (RFC 4585).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | First | Number | PictureID |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateSli(const RTCPPacketPSFBSLI& sli,
+ const RTCPPacketPSFBSLIItem& sli_item,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ const uint8_t kFmt = 2;
+ CreateHeader(kFmt, PT_PSFB, length, buffer, pos);
+ AssignUWord32(buffer, pos, sli.SenderSSRC);
+ AssignUWord32(buffer, pos, sli.MediaSSRC);
+
+ AssignUWord8(buffer, pos, sli_item.FirstMB >> 5);
+ AssignUWord8(buffer, pos, (sli_item.FirstMB << 3) +
+ ((sli_item.NumberOfMB >> 10) & 0x07));
+ AssignUWord8(buffer, pos, sli_item.NumberOfMB >> 2);
+ AssignUWord8(buffer, pos, (sli_item.NumberOfMB << 6) + sli_item.PictureId);
+}
+
+// Generic NACK (RFC 4585).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | PID | BLP |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateNack(const RTCPPacketRTPFBNACK& nack,
+ const std::vector<RTCPPacketRTPFBNACKItem>& nack_fields,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ const uint8_t kFmt = 1;
+ CreateHeader(kFmt, PT_RTPFB, length, buffer, pos);
+ AssignUWord32(buffer, pos, nack.SenderSSRC);
+ AssignUWord32(buffer, pos, nack.MediaSSRC);
+ for (std::vector<RTCPPacketRTPFBNACKItem>::const_iterator
+ it = nack_fields.begin(); it != nack_fields.end(); ++it) {
+ AssignUWord16(buffer, pos, (*it).PacketID);
+ AssignUWord16(buffer, pos, (*it).BitMask);
+ }
+}
+
+// Reference picture selection indication (RPSI) (RFC 4585).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | PB |0| Payload Type| Native RPSI bit string |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | defined per codec ... | Padding (0) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateRpsi(const RTCPPacketPSFBRPSI& rpsi,
+ uint8_t padding_bytes,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ // Native bit string should be a multiple of 8 bits.
+ assert(rpsi.NumberOfValidBits % 8 == 0);
+ const uint8_t kFmt = 3;
+ CreateHeader(kFmt, PT_PSFB, length, buffer, pos);
+ AssignUWord32(buffer, pos, rpsi.SenderSSRC);
+ AssignUWord32(buffer, pos, rpsi.MediaSSRC);
+ AssignUWord8(buffer, pos, padding_bytes * 8);
+ AssignUWord8(buffer, pos, rpsi.PayloadType);
+ memcpy(buffer + *pos, rpsi.NativeBitString, rpsi.NumberOfValidBits / 8);
+ *pos += rpsi.NumberOfValidBits / 8;
+ memset(buffer + *pos, 0, padding_bytes);
+ *pos += padding_bytes;
+}
+
+// Full intra request (FIR) (RFC 5104).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | Seq nr. | Reserved |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+void CreateFir(const RTCPPacketPSFBFIR& fir,
+ const RTCPPacketPSFBFIRItem& fir_item,
+ size_t length,
+ uint8_t* buffer,
+ size_t* pos) {
+ const uint8_t kFmt = 4;
+ CreateHeader(kFmt, PT_PSFB, length, buffer, pos);
+ AssignUWord32(buffer, pos, fir.SenderSSRC);
+ AssignUWord32(buffer, pos, kUnusedMediaSourceSsrc0);
+ AssignUWord32(buffer, pos, fir_item.SSRC);
+ AssignUWord8(buffer, pos, fir_item.CommandSequenceNumber);
+ AssignUWord24(buffer, pos, 0);
+}
+} // namespace
+
+void RtcpPacket::Append(RtcpPacket* packet) {
+ assert(packet);
+ appended_packets_.push_back(packet);
+}
+
+RawPacket RtcpPacket::Build() const {
+ size_t length = 0;
+ uint8_t packet[IP_PACKET_SIZE];
+ CreateAndAddAppended(packet, &length, IP_PACKET_SIZE);
+ return RawPacket(packet, length);
+}
+
+void RtcpPacket::Build(uint8_t* packet,
+ size_t* length,
+ size_t max_length) const {
+ *length = 0;
+ CreateAndAddAppended(packet, length, max_length);
+}
+
+void RtcpPacket::CreateAndAddAppended(uint8_t* packet,
+ size_t* length,
+ size_t max_length) const {
+ Create(packet, length, max_length);
+ for (std::vector<RtcpPacket*>::const_iterator it = appended_packets_.begin();
+ it != appended_packets_.end(); ++it) {
+ (*it)->CreateAndAddAppended(packet, length, max_length);
+ }
+}
+
+void Empty::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+}
+
+void SenderReport::Create(uint8_t* packet,
+ size_t* length,
+ size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateSenderReport(sr_, BlockToHeaderLength(BlockLength()), packet, length);
+ CreateReportBlocks(report_blocks_, packet, length);
+}
+
+void SenderReport::WithReportBlock(ReportBlock* block) {
+ assert(block);
+ if (report_blocks_.size() >= kMaxNumberOfReportBlocks) {
+ LOG(LS_WARNING) << "Max report blocks reached.";
+ return;
+ }
+ report_blocks_.push_back(block->report_block_);
+ sr_.NumberOfReportBlocks = report_blocks_.size();
+}
+
+void ReceiverReport::Create(uint8_t* packet,
+ size_t* length,
+ size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateReceiverReport(rr_, BlockToHeaderLength(BlockLength()), packet, length);
+ CreateReportBlocks(report_blocks_, packet, length);
+}
+
+void ReceiverReport::WithReportBlock(ReportBlock* block) {
+ assert(block);
+ if (report_blocks_.size() >= kMaxNumberOfReportBlocks) {
+ LOG(LS_WARNING) << "Max report blocks reached.";
+ return;
+ }
+ report_blocks_.push_back(block->report_block_);
+ rr_.NumberOfReportBlocks = report_blocks_.size();
+}
+
+void Ij::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateIj(ij_items_, packet, length);
+}
+
+void Ij::WithJitterItem(uint32_t jitter) {
+ if (ij_items_.size() >= kMaxNumberOfIjItems) {
+ LOG(LS_WARNING) << "Max inter-arrival jitter items reached.";
+ return;
+ }
+ ij_items_.push_back(jitter);
+}
+
+void Sdes::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ assert(!chunks_.empty());
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateSdes(chunks_, BlockToHeaderLength(BlockLength()), packet, length);
+}
+
+void Sdes::WithCName(uint32_t ssrc, std::string cname) {
+ assert(cname.length() <= 0xff);
+ if (chunks_.size() >= kMaxNumberOfChunks) {
+ LOG(LS_WARNING) << "Max SDES chunks reached.";
+ return;
+ }
+ // In each chunk, the list of items must be terminated by one or more null
+ // octets. The next chunk must start on a 32-bit boundary.
+ // CNAME (1 byte) | length (1 byte) | name | padding.
+ int null_octets = 4 - ((2 + cname.length()) % 4);
+ Chunk chunk;
+ chunk.ssrc = ssrc;
+ chunk.name = cname;
+ chunk.null_octets = null_octets;
+ chunks_.push_back(chunk);
+}
+
+size_t Sdes::BlockLength() const {
+ // Header (4 bytes).
+ // Chunk:
+ // SSRC/CSRC (4 bytes) | CNAME (1 byte) | length (1 byte) | name | padding.
+ size_t length = kHeaderLength;
+ for (std::vector<Chunk>::const_iterator it = chunks_.begin();
+ it != chunks_.end(); ++it) {
+ length += 6 + (*it).name.length() + (*it).null_octets;
+ }
+ assert(length % 4 == 0);
+ return length;
+}
+
+void Bye::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateBye(bye_, csrcs_, BlockToHeaderLength(BlockLength()), packet, length);
+}
+
+void Bye::WithCsrc(uint32_t csrc) {
+ if (csrcs_.size() >= kMaxNumberOfCsrcs) {
+ LOG(LS_WARNING) << "Max CSRC size reached.";
+ return;
+ }
+ csrcs_.push_back(csrc);
+}
+
+void App::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateApp(app_, ssrc_, BlockToHeaderLength(BlockLength()), packet, length);
+}
+
+void Pli::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreatePli(pli_, BlockToHeaderLength(BlockLength()), packet, length);
+}
+
+void Sli::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateSli(sli_, sli_item_, BlockToHeaderLength(BlockLength()), packet,
+ length);
+}
+
+void Nack::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ assert(!nack_fields_.empty());
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateNack(nack_, nack_fields_, BlockToHeaderLength(BlockLength()), packet,
+ length);
+}
+
+void Nack::WithList(const uint16_t* nack_list, int length) {
+ assert(nack_list);
+ assert(nack_fields_.empty());
+ int i = 0;
+ while (i < length) {
+ uint16_t pid = nack_list[i++];
+ // Bitmask specifies losses in any of the 16 packets following the pid.
+ uint16_t bitmask = 0;
+ while (i < length) {
+ int shift = static_cast<uint16_t>(nack_list[i] - pid) - 1;
+ if (shift >= 0 && shift <= 15) {
+ bitmask |= (1 << shift);
+ ++i;
+ } else {
+ break;
+ }
+ }
+ RTCPUtility::RTCPPacketRTPFBNACKItem item;
+ item.PacketID = pid;
+ item.BitMask = bitmask;
+ nack_fields_.push_back(item);
+ }
+}
+
+void Rpsi::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ assert(rpsi_.NumberOfValidBits > 0);
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateRpsi(rpsi_, padding_bytes_, BlockToHeaderLength(BlockLength()), packet,
+ length);
+}
+
+void Rpsi::WithPictureId(uint64_t picture_id) {
+ const uint32_t kPidBits = 7;
+ const uint64_t k7MsbZeroMask = 0x1ffffffffffffff;
+ uint8_t required_bytes = 0;
+ uint64_t shifted_pid = picture_id;
+ do {
+ ++required_bytes;
+ shifted_pid = (shifted_pid >> kPidBits) & k7MsbZeroMask;
+ } while (shifted_pid > 0);
+
+ // Convert picture id to native bit string (natively defined by the video
+ // codec).
+ int pos = 0;
+ for (int i = required_bytes - 1; i > 0; i--) {
+ rpsi_.NativeBitString[pos++] =
+ 0x80 | static_cast<uint8_t>(picture_id >> (i * kPidBits));
+ }
+ rpsi_.NativeBitString[pos++] = static_cast<uint8_t>(picture_id & 0x7f);
+ rpsi_.NumberOfValidBits = pos * 8;
+
+ // Calculate padding bytes (to reach next 32-bit boundary, 1, 2 or 3 bytes).
+ padding_bytes_ = 4 - ((2 + required_bytes) % 4);
+ if (padding_bytes_ == 4) {
+ padding_bytes_ = 0;
+ }
+}
+
+void Fir::Create(uint8_t* packet, size_t* length, size_t max_length) const {
+ if (*length + BlockLength() > max_length) {
+ LOG(LS_WARNING) << "Max packet size reached.";
+ return;
+ }
+ CreateFir(fir_, fir_item_, BlockToHeaderLength(BlockLength()), packet,
+ length);
+}
+} // namespace rtcp
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.h
new file mode 100644
index 00000000000..f60e848b50b
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet.h
@@ -0,0 +1,726 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_RTCP_PACKET_H_
+#define WEBRTC_MODULES_RTP_RTCP_RTCP_PACKET_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+namespace rtcp {
+
+enum { kCommonFbFmtLength = 12 };
+enum { kReportBlockLength = 24 };
+
+class RawPacket;
+
+// Class for building RTCP packets.
+//
+// Example:
+// ReportBlock report_block;
+// report_block.To(234)
+// report_block.FractionLost(10);
+//
+// ReceiverReport rr;
+// rr.From(123);
+// rr.WithReportBlock(&report_block)
+//
+// Fir fir;
+// fir.From(123);
+// fir.To(234)
+// fir.WithCommandSeqNum(123);
+//
+// size_t length = 0; // Builds an intra frame request
+// uint8_t packet[kPacketSize]; // with sequence number 123.
+// fir.Build(packet, &length, kPacketSize);
+//
+// RawPacket packet = fir.Build(); // Returns a RawPacket holding
+// // the built rtcp packet.
+//
+// rr.Append(&fir) // Builds a compound RTCP packet with
+// RawPacket packet = rr.Build(); // a receiver report, report block
+// // and fir message.
+
+class RtcpPacket {
+ public:
+ virtual ~RtcpPacket() {}
+
+ void Append(RtcpPacket* packet);
+
+ RawPacket Build() const;
+
+ void Build(uint8_t* packet, size_t* length, size_t max_length) const;
+
+ protected:
+ RtcpPacket() : kHeaderLength(4) {}
+
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const = 0;
+
+ const size_t kHeaderLength;
+
+ private:
+ void CreateAndAddAppended(
+ uint8_t* packet, size_t* length, size_t max_length) const;
+
+ std::vector<RtcpPacket*> appended_packets_;
+};
+
+class Empty : public RtcpPacket {
+ public:
+ Empty() {}
+
+ virtual ~Empty() {}
+
+ protected:
+ virtual void Create(uint8_t* packet, size_t* length, size_t max_length) const;
+};
+
+// From RFC 3550, RTP: A Transport Protocol for Real-Time Applications.
+//
+// RTCP report block (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | SSRC_1 (SSRC of first source) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | fraction lost | cumulative number of packets lost |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | extended highest sequence number received |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | interarrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | last SR (LSR) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | delay since last SR (DLSR) |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+
+class ReportBlock {
+ public:
+ ReportBlock() {
+ // TODO(asapersson): Consider adding a constructor to struct.
+ memset(&report_block_, 0, sizeof(report_block_));
+ }
+
+ ~ReportBlock() {}
+
+ void To(uint32_t ssrc) {
+ report_block_.SSRC = ssrc;
+ }
+ void WithFractionLost(uint8_t fraction_lost) {
+ report_block_.FractionLost = fraction_lost;
+ }
+ void WithCumulativeLost(uint32_t cumulative_lost) {
+ report_block_.CumulativeNumOfPacketsLost = cumulative_lost;
+ }
+ void WithExtHighestSeqNum(uint32_t ext_highest_seq_num) {
+ report_block_.ExtendedHighestSequenceNumber = ext_highest_seq_num;
+ }
+ void WithJitter(uint32_t jitter) {
+ report_block_.Jitter = jitter;
+ }
+ void WithLastSr(uint32_t last_sr) {
+ report_block_.LastSR = last_sr;
+ }
+ void WithDelayLastSr(uint32_t delay_last_sr) {
+ report_block_.DelayLastSR = delay_last_sr;
+ }
+
+ private:
+ friend class SenderReport;
+ friend class ReceiverReport;
+ RTCPUtility::RTCPPacketReportBlockItem report_block_;
+};
+
+// RTCP sender report (RFC 3550).
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| RC | PT=SR=200 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of sender |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | NTP timestamp, most significant word |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | NTP timestamp, least significant word |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | RTP timestamp |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | sender's packet count |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | sender's octet count |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | report block(s) |
+// | .... |
+
+class SenderReport : public RtcpPacket {
+ public:
+ SenderReport() : RtcpPacket() {
+ memset(&sr_, 0, sizeof(sr_));
+ }
+
+ virtual ~SenderReport() {}
+
+ void From(uint32_t ssrc) {
+ sr_.SenderSSRC = ssrc;
+ }
+ void WithNtpSec(uint32_t sec) {
+ sr_.NTPMostSignificant = sec;
+ }
+ void WithNtpFrac(uint32_t frac) {
+ sr_.NTPLeastSignificant = frac;
+ }
+ void WithRtpTimestamp(uint32_t rtp_timestamp) {
+ sr_.RTPTimestamp = rtp_timestamp;
+ }
+ void WithPacketCount(uint32_t packet_count) {
+ sr_.SenderPacketCount = packet_count;
+ }
+ void WithOctetCount(uint32_t octet_count) {
+ sr_.SenderOctetCount = octet_count;
+ }
+ void WithReportBlock(ReportBlock* block);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ enum { kMaxNumberOfReportBlocks = 0x1f };
+
+ size_t BlockLength() const {
+ const size_t kSrHeaderLength = 8;
+ const size_t kSenderInfoLength = 20;
+ return kSrHeaderLength + kSenderInfoLength +
+ report_blocks_.size() * kReportBlockLength;
+ }
+
+ RTCPUtility::RTCPPacketSR sr_;
+ std::vector<RTCPUtility::RTCPPacketReportBlockItem> report_blocks_;
+};
+
+//
+// RTCP receiver report (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| RC | PT=RR=201 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of packet sender |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// | report block(s) |
+// | .... |
+
+class ReceiverReport : public RtcpPacket {
+ public:
+ ReceiverReport() : RtcpPacket() {
+ memset(&rr_, 0, sizeof(rr_));
+ }
+
+ virtual ~ReceiverReport() {}
+
+ void From(uint32_t ssrc) {
+ rr_.SenderSSRC = ssrc;
+ }
+ void WithReportBlock(ReportBlock* block);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ enum { kMaxNumberOfReportBlocks = 0x1f };
+
+ size_t BlockLength() const {
+ const size_t kRrHeaderLength = 8;
+ return kRrHeaderLength + report_blocks_.size() * kReportBlockLength;
+ }
+
+ RTCPUtility::RTCPPacketRR rr_;
+ std::vector<RTCPUtility::RTCPPacketReportBlockItem> report_blocks_;
+};
+
+// Transmission Time Offsets in RTP Streams (RFC 5450).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// hdr |V=2|P| RC | PT=IJ=195 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | inter-arrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// . .
+// . .
+// . .
+// | inter-arrival jitter |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// If present, this RTCP packet must be placed after a receiver report
+// (inside a compound RTCP packet), and MUST have the same value for RC
+// (reception report count) as the receiver report.
+
+class Ij : public RtcpPacket {
+ public:
+ Ij() : RtcpPacket() {}
+
+ virtual ~Ij() {}
+
+ void WithJitterItem(uint32_t jitter);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ enum { kMaxNumberOfIjItems = 0x1f };
+
+ size_t BlockLength() const {
+ return kHeaderLength + 4 * ij_items_.size();
+ }
+
+ std::vector<uint32_t> ij_items_;
+
+ DISALLOW_COPY_AND_ASSIGN(Ij);
+};
+
+// Source Description (SDES) (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// header |V=2|P| SC | PT=SDES=202 | length |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// chunk | SSRC/CSRC_1 |
+// 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SDES items |
+// | ... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// chunk | SSRC/CSRC_2 |
+// 2 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SDES items |
+// | ... |
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+//
+// Canonical End-Point Identifier SDES Item (CNAME)
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | CNAME=1 | length | user and domain name ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Sdes : public RtcpPacket {
+ public:
+ Sdes() : RtcpPacket() {}
+
+ virtual ~Sdes() {}
+
+ void WithCName(uint32_t ssrc, std::string cname);
+
+ struct Chunk {
+ uint32_t ssrc;
+ std::string name;
+ int null_octets;
+ };
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ enum { kMaxNumberOfChunks = 0x1f };
+
+ size_t BlockLength() const;
+
+ std::vector<Chunk> chunks_;
+
+ DISALLOW_COPY_AND_ASSIGN(Sdes);
+};
+
+//
+// Bye packet (BYE) (RFC 3550).
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| SC | PT=BYE=203 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC/CSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : ... :
+// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+// (opt) | length | reason for leaving ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Bye : public RtcpPacket {
+ public:
+ Bye() : RtcpPacket() {
+ memset(&bye_, 0, sizeof(bye_));
+ }
+
+ virtual ~Bye() {}
+
+ void From(uint32_t ssrc) {
+ bye_.SenderSSRC = ssrc;
+ }
+ void WithCsrc(uint32_t csrc);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ enum { kMaxNumberOfCsrcs = 0x1f - 1 };
+
+ size_t BlockLength() const {
+ size_t source_count = 1 + csrcs_.size();
+ return kHeaderLength + 4 * source_count;
+ }
+
+ RTCPUtility::RTCPPacketBYE bye_;
+ std::vector<uint32_t> csrcs_;
+};
+
+// Application-Defined packet (APP) (RFC 3550).
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| subtype | PT=APP=204 | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC/CSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | name (ASCII) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | application-dependent data ...
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class App : public RtcpPacket {
+ public:
+ App()
+ : RtcpPacket(),
+ ssrc_(0) {
+ memset(&app_, 0, sizeof(app_));
+ }
+
+ virtual ~App() {}
+
+ void From(uint32_t ssrc) {
+ ssrc_ = ssrc;
+ }
+ void WithSubType(uint8_t subtype) {
+ assert(subtype <= 0x1f);
+ app_.SubType = subtype;
+ }
+ void WithName(uint32_t name) {
+ app_.Name = name;
+ }
+ void WithData(const uint8_t* data, uint16_t data_length) {
+ assert(data);
+ assert(data_length <= kRtcpAppCode_DATA_SIZE);
+ assert(data_length % 4 == 0);
+ memcpy(app_.Data, data, data_length);
+ app_.Size = data_length;
+ }
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ return 12 + app_.Size;
+ }
+
+ uint32_t ssrc_;
+ RTCPUtility::RTCPPacketAPP app_;
+
+ DISALLOW_COPY_AND_ASSIGN(App);
+};
+
+// RFC 4585: Feedback format.
+//
+// Common packet format:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |V=2|P| FMT | PT | length |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of packet sender |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC of media source |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// : Feedback Control Information (FCI) :
+// :
+
+// Picture loss indication (PLI) (RFC 4585).
+//
+// FCI: no feedback control information.
+
+class Pli : public RtcpPacket {
+ public:
+ Pli() : RtcpPacket() {
+ memset(&pli_, 0, sizeof(pli_));
+ }
+
+ virtual ~Pli() {}
+
+ void From(uint32_t ssrc) {
+ pli_.SenderSSRC = ssrc;
+ }
+ void To(uint32_t ssrc) {
+ pli_.MediaSSRC = ssrc;
+ }
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ return kCommonFbFmtLength;
+ }
+
+ RTCPUtility::RTCPPacketPSFBPLI pli_;
+
+ DISALLOW_COPY_AND_ASSIGN(Pli);
+};
+
+// Slice loss indication (SLI) (RFC 4585).
+//
+// FCI:
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | First | Number | PictureID |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Sli : public RtcpPacket {
+ public:
+ Sli() : RtcpPacket() {
+ memset(&sli_, 0, sizeof(sli_));
+ memset(&sli_item_, 0, sizeof(sli_item_));
+ }
+
+ virtual ~Sli() {}
+
+ void From(uint32_t ssrc) {
+ sli_.SenderSSRC = ssrc;
+ }
+ void To(uint32_t ssrc) {
+ sli_.MediaSSRC = ssrc;
+ }
+ void WithFirstMb(uint16_t first_mb) {
+ assert(first_mb <= 0x1fff);
+ sli_item_.FirstMB = first_mb;
+ }
+ void WithNumberOfMb(uint16_t number_mb) {
+ assert(number_mb <= 0x1fff);
+ sli_item_.NumberOfMB = number_mb;
+ }
+ void WithPictureId(uint8_t picture_id) {
+ assert(picture_id <= 0x3f);
+ sli_item_.PictureId = picture_id;
+ }
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ const size_t kFciLength = 4;
+ return kCommonFbFmtLength + kFciLength;
+ }
+
+ RTCPUtility::RTCPPacketPSFBSLI sli_;
+ RTCPUtility::RTCPPacketPSFBSLIItem sli_item_;
+
+ DISALLOW_COPY_AND_ASSIGN(Sli);
+};
+
+// Generic NACK (RFC 4585).
+//
+// FCI:
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | PID | BLP |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Nack : public RtcpPacket {
+ public:
+ Nack() : RtcpPacket() {
+ memset(&nack_, 0, sizeof(nack_));
+ }
+
+ virtual ~Nack() {}
+
+ void From(uint32_t ssrc) {
+ nack_.SenderSSRC = ssrc;
+ }
+ void To(uint32_t ssrc) {
+ nack_.MediaSSRC = ssrc;
+ }
+ void WithList(const uint16_t* nack_list, int length);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ size_t fci_length = 4 * nack_fields_.size();
+ return kCommonFbFmtLength + fci_length;
+ }
+
+ RTCPUtility::RTCPPacketRTPFBNACK nack_;
+ std::vector<RTCPUtility::RTCPPacketRTPFBNACKItem> nack_fields_;
+
+ DISALLOW_COPY_AND_ASSIGN(Nack);
+};
+
+// Reference picture selection indication (RPSI) (RFC 4585).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | PB |0| Payload Type| Native RPSI bit string |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | defined per codec ... | Padding (0) |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Rpsi : public RtcpPacket {
+ public:
+ Rpsi()
+ : RtcpPacket(),
+ padding_bytes_(0) {
+ memset(&rpsi_, 0, sizeof(rpsi_));
+ }
+
+ virtual ~Rpsi() {}
+
+ void From(uint32_t ssrc) {
+ rpsi_.SenderSSRC = ssrc;
+ }
+ void To(uint32_t ssrc) {
+ rpsi_.MediaSSRC = ssrc;
+ }
+ void WithPayloadType(uint8_t payload) {
+ assert(payload <= 0x7f);
+ rpsi_.PayloadType = payload;
+ }
+ void WithPictureId(uint64_t picture_id);
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ size_t fci_length = 2 + (rpsi_.NumberOfValidBits / 8) + padding_bytes_;
+ return kCommonFbFmtLength + fci_length;
+ }
+
+ uint8_t padding_bytes_;
+ RTCPUtility::RTCPPacketPSFBRPSI rpsi_;
+
+ DISALLOW_COPY_AND_ASSIGN(Rpsi);
+};
+
+// Full intra request (FIR) (RFC 5104).
+//
+// FCI:
+//
+// 0 1 2 3
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | SSRC |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | Seq nr. | Reserved |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+class Fir : public RtcpPacket {
+ public:
+ Fir()
+ : RtcpPacket() {
+ memset(&fir_, 0, sizeof(fir_));
+ memset(&fir_item_, 0, sizeof(fir_item_));
+ }
+
+ virtual ~Fir() {}
+
+ void From(uint32_t ssrc) {
+ fir_.SenderSSRC = ssrc;
+ }
+ void To(uint32_t ssrc) {
+ fir_item_.SSRC = ssrc;
+ }
+ void WithCommandSeqNum(uint8_t seq_num) {
+ fir_item_.CommandSequenceNumber = seq_num;
+ }
+
+ protected:
+ virtual void Create(
+ uint8_t* packet, size_t* length, size_t max_length) const OVERRIDE;
+
+ private:
+ size_t BlockLength() const {
+ const size_t kFciLength = 8;
+ return kCommonFbFmtLength + kFciLength;
+ }
+
+ RTCPUtility::RTCPPacketPSFBFIR fir_;
+ RTCPUtility::RTCPPacketPSFBFIRItem fir_item_;
+};
+
+// Class holding a RTCP packet.
+//
+// Takes a built rtcp packet.
+// RawPacket raw_packet(buffer, length);
+//
+// To access the raw packet:
+// raw_packet.buffer(); - pointer to the raw packet
+// raw_packet.buffer_length(); - the length of the raw packet
+
+class RawPacket {
+ public:
+ RawPacket(const uint8_t* packet, size_t length) {
+ assert(length <= IP_PACKET_SIZE);
+ memcpy(buffer_, packet, length);
+ buffer_length_ = length;
+ }
+
+ const uint8_t* buffer() {
+ return buffer_;
+ }
+ size_t buffer_length() const {
+ return buffer_length_;
+ }
+
+ private:
+ size_t buffer_length_;
+ uint8_t buffer_[IP_PACKET_SIZE];
+};
+
+} // namespace rtcp
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_RTCP_PACKET_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
new file mode 100644
index 00000000000..aa25c2e5f9d
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_packet_unittest.cc
@@ -0,0 +1,592 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ * This file includes unit tests for the RtcpPacket.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
+#include "webrtc/test/rtcp_packet_parser.h"
+
+using webrtc::rtcp::App;
+using webrtc::rtcp::Bye;
+using webrtc::rtcp::Empty;
+using webrtc::rtcp::Fir;
+using webrtc::rtcp::Ij;
+using webrtc::rtcp::Nack;
+using webrtc::rtcp::Pli;
+using webrtc::rtcp::Sdes;
+using webrtc::rtcp::SenderReport;
+using webrtc::rtcp::Sli;
+using webrtc::rtcp::RawPacket;
+using webrtc::rtcp::ReceiverReport;
+using webrtc::rtcp::ReportBlock;
+using webrtc::rtcp::Rpsi;
+using webrtc::test::RtcpPacketParser;
+
+namespace webrtc {
+
+const uint32_t kSenderSsrc = 0x12345678;
+const uint32_t kRemoteSsrc = 0x23456789;
+
+TEST(RtcpPacketTest, Rr) {
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+
+ RawPacket packet = rr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
+ EXPECT_EQ(0, parser.report_block()->num_packets());
+}
+
+TEST(RtcpPacketTest, RrWithOneReportBlock) {
+ ReportBlock rb;
+ rb.To(kRemoteSsrc);
+ rb.WithFractionLost(55);
+ rb.WithCumulativeLost(0x111111);
+ rb.WithExtHighestSeqNum(0x22222222);
+ rb.WithJitter(0x33333333);
+ rb.WithLastSr(0x44444444);
+ rb.WithDelayLastSr(0x55555555);
+
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb);
+
+ RawPacket packet = rr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(kRemoteSsrc, parser.report_block()->Ssrc());
+ EXPECT_EQ(55U, parser.report_block()->FractionLost());
+ EXPECT_EQ(0x111111U, parser.report_block()->CumPacketLost());
+ EXPECT_EQ(0x22222222U, parser.report_block()->ExtHighestSeqNum());
+ EXPECT_EQ(0x33333333U, parser.report_block()->Jitter());
+ EXPECT_EQ(0x44444444U, parser.report_block()->LastSr());
+ EXPECT_EQ(0x55555555U, parser.report_block()->DelayLastSr());
+}
+
+TEST(RtcpPacketTest, RrWithTwoReportBlocks) {
+ ReportBlock rb1;
+ rb1.To(kRemoteSsrc);
+ ReportBlock rb2;
+ rb2.To(kRemoteSsrc + 1);
+
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb1);
+ rr.WithReportBlock(&rb2);
+
+ RawPacket packet = rr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
+ EXPECT_EQ(2, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.report_blocks_per_ssrc(kRemoteSsrc));
+ EXPECT_EQ(1, parser.report_blocks_per_ssrc(kRemoteSsrc + 1));
+}
+
+TEST(RtcpPacketTest, Sr) {
+ SenderReport sr;
+ sr.From(kSenderSsrc);
+ sr.WithNtpSec(0x11111111);
+ sr.WithNtpFrac(0x22222222);
+ sr.WithRtpTimestamp(0x33333333);
+ sr.WithPacketCount(0x44444444);
+ sr.WithOctetCount(0x55555555);
+
+ RawPacket packet = sr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+
+ EXPECT_EQ(1, parser.sender_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sender_report()->Ssrc());
+ EXPECT_EQ(0x11111111U, parser.sender_report()->NtpSec());
+ EXPECT_EQ(0x22222222U, parser.sender_report()->NtpFrac());
+ EXPECT_EQ(0x33333333U, parser.sender_report()->RtpTimestamp());
+ EXPECT_EQ(0x44444444U, parser.sender_report()->PacketCount());
+ EXPECT_EQ(0x55555555U, parser.sender_report()->OctetCount());
+ EXPECT_EQ(0, parser.report_block()->num_packets());
+}
+
+TEST(RtcpPacketTest, SrWithOneReportBlock) {
+ ReportBlock rb;
+ rb.To(kRemoteSsrc);
+
+ SenderReport sr;
+ sr.From(kSenderSsrc);
+ sr.WithReportBlock(&rb);
+
+ RawPacket packet = sr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sender_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sender_report()->Ssrc());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(kRemoteSsrc, parser.report_block()->Ssrc());
+}
+
+TEST(RtcpPacketTest, SrWithTwoReportBlocks) {
+ ReportBlock rb1;
+ rb1.To(kRemoteSsrc);
+ ReportBlock rb2;
+ rb2.To(kRemoteSsrc + 1);
+
+ SenderReport sr;
+ sr.From(kSenderSsrc);
+ sr.WithReportBlock(&rb1);
+ sr.WithReportBlock(&rb2);
+
+ RawPacket packet = sr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sender_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sender_report()->Ssrc());
+ EXPECT_EQ(2, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.report_blocks_per_ssrc(kRemoteSsrc));
+ EXPECT_EQ(1, parser.report_blocks_per_ssrc(kRemoteSsrc + 1));
+}
+
+TEST(RtcpPacketTest, IjNoItem) {
+ Ij ij;
+
+ RawPacket packet = ij.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.ij()->num_packets());
+ EXPECT_EQ(0, parser.ij_item()->num_packets());
+}
+
+TEST(RtcpPacketTest, IjOneItem) {
+ Ij ij;
+ ij.WithJitterItem(0x11111111);
+
+ RawPacket packet = ij.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.ij()->num_packets());
+ EXPECT_EQ(1, parser.ij_item()->num_packets());
+ EXPECT_EQ(0x11111111U, parser.ij_item()->Jitter());
+}
+
+TEST(RtcpPacketTest, IjTwoItems) {
+ Ij ij;
+ ij.WithJitterItem(0x11111111);
+ ij.WithJitterItem(0x22222222);
+
+ RawPacket packet = ij.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.ij()->num_packets());
+ EXPECT_EQ(2, parser.ij_item()->num_packets());
+ EXPECT_EQ(0x22222222U, parser.ij_item()->Jitter());
+}
+
+TEST(RtcpPacketTest, AppWithNoData) {
+ App app;
+ app.WithSubType(30);
+ uint32_t name = 'n' << 24;
+ name += 'a' << 16;
+ name += 'm' << 8;
+ name += 'e';
+ app.WithName(name);
+
+ RawPacket packet = app.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.app()->num_packets());
+ EXPECT_EQ(30U, parser.app()->SubType());
+ EXPECT_EQ(name, parser.app()->Name());
+ EXPECT_EQ(0, parser.app_item()->num_packets());
+}
+
+TEST(RtcpPacketTest, App) {
+ App app;
+ app.From(kSenderSsrc);
+ app.WithSubType(30);
+ uint32_t name = 'n' << 24;
+ name += 'a' << 16;
+ name += 'm' << 8;
+ name += 'e';
+ app.WithName(name);
+ const char kData[] = {'t', 'e', 's', 't', 'd', 'a', 't', 'a'};
+ const size_t kDataLength = sizeof(kData) / sizeof(kData[0]);
+ app.WithData((const uint8_t*)kData, kDataLength);
+
+ RawPacket packet = app.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.app()->num_packets());
+ EXPECT_EQ(30U, parser.app()->SubType());
+ EXPECT_EQ(name, parser.app()->Name());
+ EXPECT_EQ(1, parser.app_item()->num_packets());
+ EXPECT_EQ(kDataLength, parser.app_item()->DataLength());
+ EXPECT_EQ(0, strncmp(kData, (const char*)parser.app_item()->Data(),
+ parser.app_item()->DataLength()));
+}
+
+TEST(RtcpPacketTest, SdesWithOneChunk) {
+ Sdes sdes;
+ sdes.WithCName(kSenderSsrc, "alice@host");
+
+ RawPacket packet = sdes.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sdes()->num_packets());
+ EXPECT_EQ(1, parser.sdes_chunk()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sdes_chunk()->Ssrc());
+ EXPECT_EQ("alice@host", parser.sdes_chunk()->Cname());
+}
+
+TEST(RtcpPacketTest, SdesWithMultipleChunks) {
+ Sdes sdes;
+ sdes.WithCName(kSenderSsrc, "a");
+ sdes.WithCName(kSenderSsrc + 1, "ab");
+ sdes.WithCName(kSenderSsrc + 2, "abc");
+ sdes.WithCName(kSenderSsrc + 3, "abcd");
+ sdes.WithCName(kSenderSsrc + 4, "abcde");
+ sdes.WithCName(kSenderSsrc + 5, "abcdef");
+
+ RawPacket packet = sdes.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sdes()->num_packets());
+ EXPECT_EQ(6, parser.sdes_chunk()->num_packets());
+ EXPECT_EQ(kSenderSsrc + 5, parser.sdes_chunk()->Ssrc());
+ EXPECT_EQ("abcdef", parser.sdes_chunk()->Cname());
+}
+
+TEST(RtcpPacketTest, CnameItemWithEmptyString) {
+ Sdes sdes;
+ sdes.WithCName(kSenderSsrc, "");
+
+ RawPacket packet = sdes.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sdes()->num_packets());
+ EXPECT_EQ(1, parser.sdes_chunk()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sdes_chunk()->Ssrc());
+ EXPECT_EQ("", parser.sdes_chunk()->Cname());
+}
+
+TEST(RtcpPacketTest, Pli) {
+ Pli pli;
+ pli.From(kSenderSsrc);
+ pli.To(kRemoteSsrc);
+
+ RawPacket packet = pli.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.pli()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.pli()->Ssrc());
+ EXPECT_EQ(kRemoteSsrc, parser.pli()->MediaSsrc());
+}
+
+TEST(RtcpPacketTest, Sli) {
+ const uint16_t kFirstMb = 7777;
+ const uint16_t kNumberOfMb = 6666;
+ const uint8_t kPictureId = 60;
+ Sli sli;
+ sli.From(kSenderSsrc);
+ sli.To(kRemoteSsrc);
+ sli.WithFirstMb(kFirstMb);
+ sli.WithNumberOfMb(kNumberOfMb);
+ sli.WithPictureId(kPictureId);
+
+ RawPacket packet = sli.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sli()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.sli()->Ssrc());
+ EXPECT_EQ(kRemoteSsrc, parser.sli()->MediaSsrc());
+ EXPECT_EQ(1, parser.sli_item()->num_packets());
+ EXPECT_EQ(kFirstMb, parser.sli_item()->FirstMb());
+ EXPECT_EQ(kNumberOfMb, parser.sli_item()->NumberOfMb());
+ EXPECT_EQ(kPictureId, parser.sli_item()->PictureId());
+}
+
+TEST(RtcpPacketTest, Nack) {
+ Nack nack;
+ const uint16_t kList[] = {0, 1, 3, 8, 16};
+ const uint16_t kListLength = sizeof(kList) / sizeof(kList[0]);
+ nack.From(kSenderSsrc);
+ nack.To(kRemoteSsrc);
+ nack.WithList(kList, kListLength);
+ RawPacket packet = nack.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.nack()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.nack()->Ssrc());
+ EXPECT_EQ(kRemoteSsrc, parser.nack()->MediaSsrc());
+ EXPECT_EQ(1, parser.nack_item()->num_packets());
+ std::vector<uint16_t> seqs = parser.nack_item()->last_nack_list();
+ EXPECT_EQ(kListLength, seqs.size());
+ for (size_t i = 0; i < kListLength; ++i) {
+ EXPECT_EQ(kList[i], seqs[i]);
+ }
+}
+
+TEST(RtcpPacketTest, NackWithWrap) {
+ Nack nack;
+ const uint16_t kList[] = {65500, 65516, 65534, 65535, 0, 1, 3, 20, 100};
+ const uint16_t kListLength = sizeof(kList) / sizeof(kList[0]);
+ nack.From(kSenderSsrc);
+ nack.To(kRemoteSsrc);
+ nack.WithList(kList, kListLength);
+ RawPacket packet = nack.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.nack()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.nack()->Ssrc());
+ EXPECT_EQ(kRemoteSsrc, parser.nack()->MediaSsrc());
+ EXPECT_EQ(4, parser.nack_item()->num_packets());
+ std::vector<uint16_t> seqs = parser.nack_item()->last_nack_list();
+ EXPECT_EQ(kListLength, seqs.size());
+ for (size_t i = 0; i < kListLength; ++i) {
+ EXPECT_EQ(kList[i], seqs[i]);
+ }
+}
+
+TEST(RtcpPacketTest, Rpsi) {
+ Rpsi rpsi;
+ // 1000001 (7 bits = 1 byte in native string).
+ const uint64_t kPictureId = 0x41;
+ const uint16_t kNumberOfValidBytes = 1;
+ rpsi.WithPayloadType(100);
+ rpsi.WithPictureId(kPictureId);
+
+ RawPacket packet = rpsi.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(100, parser.rpsi()->PayloadType());
+ EXPECT_EQ(kNumberOfValidBytes * 8, parser.rpsi()->NumberOfValidBits());
+ EXPECT_EQ(kPictureId, parser.rpsi()->PictureId());
+}
+
+TEST(RtcpPacketTest, RpsiWithTwoByteNativeString) {
+ Rpsi rpsi;
+ // |1 0000001 (7 bits = 1 byte in native string).
+ const uint64_t kPictureId = 0x81;
+ const uint16_t kNumberOfValidBytes = 2;
+ rpsi.WithPictureId(kPictureId);
+
+ RawPacket packet = rpsi.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(kNumberOfValidBytes * 8, parser.rpsi()->NumberOfValidBits());
+ EXPECT_EQ(kPictureId, parser.rpsi()->PictureId());
+}
+
+TEST(RtcpPacketTest, RpsiWithThreeByteNativeString) {
+ Rpsi rpsi;
+ // 10000|00 100000|0 1000000 (7 bits = 1 byte in native string).
+ const uint64_t kPictureId = 0x102040;
+ const uint16_t kNumberOfValidBytes = 3;
+ rpsi.WithPictureId(kPictureId);
+
+ RawPacket packet = rpsi.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(kNumberOfValidBytes * 8, parser.rpsi()->NumberOfValidBits());
+ EXPECT_EQ(kPictureId, parser.rpsi()->PictureId());
+}
+
+TEST(RtcpPacketTest, RpsiWithFourByteNativeString) {
+ Rpsi rpsi;
+ // 1000|001 00001|01 100001|1 1000010 (7 bits = 1 byte in native string).
+ const uint64_t kPictureId = 0x84161C2;
+ const uint16_t kNumberOfValidBytes = 4;
+ rpsi.WithPictureId(kPictureId);
+
+ RawPacket packet = rpsi.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(kNumberOfValidBytes * 8, parser.rpsi()->NumberOfValidBits());
+ EXPECT_EQ(kPictureId, parser.rpsi()->PictureId());
+}
+
+TEST(RtcpPacketTest, RpsiWithMaxPictureId) {
+ Rpsi rpsi;
+ // 1 1111111| 1111111 1|111111 11|11111 111|1111 1111|111 11111|
+ // 11 111111|1 1111111 (7 bits = 1 byte in native string).
+ const uint64_t kPictureId = 0xffffffffffffffff;
+ const uint16_t kNumberOfValidBytes = 10;
+ rpsi.WithPictureId(kPictureId);
+
+ RawPacket packet = rpsi.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(kNumberOfValidBytes * 8, parser.rpsi()->NumberOfValidBits());
+ EXPECT_EQ(kPictureId, parser.rpsi()->PictureId());
+}
+
+TEST(RtcpPacketTest, Fir) {
+ Fir fir;
+ fir.From(kSenderSsrc);
+ fir.To(kRemoteSsrc);
+ fir.WithCommandSeqNum(123);
+
+ RawPacket packet = fir.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.fir()->Ssrc());
+ EXPECT_EQ(1, parser.fir_item()->num_packets());
+ EXPECT_EQ(kRemoteSsrc, parser.fir_item()->Ssrc());
+ EXPECT_EQ(123U, parser.fir_item()->SeqNum());
+}
+
+TEST(RtcpPacketTest, AppendPacket) {
+ Fir fir;
+ ReportBlock rb;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb);
+ rr.Append(&fir);
+
+ RawPacket packet = rr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.receiver_report()->Ssrc());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+
+TEST(RtcpPacketTest, AppendPacketOnEmpty) {
+ Empty empty;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ empty.Append(&rr);
+
+ RawPacket packet = empty.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(0, parser.report_block()->num_packets());
+}
+
+TEST(RtcpPacketTest, AppendPacketWithOwnAppendedPacket) {
+ Fir fir;
+ Bye bye;
+ ReportBlock rb;
+
+ ReceiverReport rr;
+ rr.WithReportBlock(&rb);
+ rr.Append(&fir);
+
+ SenderReport sr;
+ sr.Append(&bye);
+ sr.Append(&rr);
+
+ RawPacket packet = sr.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.sender_report()->num_packets());
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.bye()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+
+TEST(RtcpPacketTest, Bye) {
+ Bye bye;
+ bye.From(kSenderSsrc);
+
+ RawPacket packet = bye.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.bye()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.bye()->Ssrc());
+}
+
+TEST(RtcpPacketTest, ByeWithCsrcs) {
+ Fir fir;
+ Bye bye;
+ bye.From(kSenderSsrc);
+ bye.WithCsrc(0x22222222);
+ bye.WithCsrc(0x33333333);
+ bye.Append(&fir);
+
+ RawPacket packet = bye.Build();
+ RtcpPacketParser parser;
+ parser.Parse(packet.buffer(), packet.buffer_length());
+ EXPECT_EQ(1, parser.bye()->num_packets());
+ EXPECT_EQ(kSenderSsrc, parser.bye()->Ssrc());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+
+TEST(RtcpPacketTest, BuildWithInputBuffer) {
+ Fir fir;
+ ReportBlock rb;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb);
+ rr.Append(&fir);
+
+ const size_t kRrLength = 8;
+ const size_t kReportBlockLength = 24;
+ const size_t kFirLength = 20;
+
+ size_t len = 0;
+ uint8_t packet[kRrLength + kReportBlockLength + kFirLength];
+ rr.Build(packet, &len, kRrLength + kReportBlockLength + kFirLength);
+
+ RtcpPacketParser parser;
+ parser.Parse(packet, len);
+ EXPECT_EQ(1, parser.receiver_report()->num_packets());
+ EXPECT_EQ(1, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+
+TEST(RtcpPacketTest, BuildWithTooSmallBuffer) {
+ ReportBlock rb;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb);
+
+ const size_t kRrLength = 8;
+ const size_t kReportBlockLength = 24;
+
+ // No packet.
+ size_t len = 0;
+ uint8_t packet[kRrLength + kReportBlockLength - 1];
+ rr.Build(packet, &len, kRrLength + kReportBlockLength - 1);
+ RtcpPacketParser parser;
+ parser.Parse(packet, len);
+ EXPECT_EQ(0U, len);
+}
+
+TEST(RtcpPacketTest, BuildWithTooSmallBuffer_LastBlockFits) {
+ Fir fir;
+ ReportBlock rb;
+ ReceiverReport rr;
+ rr.From(kSenderSsrc);
+ rr.WithReportBlock(&rb);
+ rr.Append(&fir);
+
+ const size_t kRrLength = 8;
+ const size_t kReportBlockLength = 24;
+
+ size_t len = 0;
+ uint8_t packet[kRrLength + kReportBlockLength - 1];
+ rr.Build(packet, &len, kRrLength + kReportBlockLength - 1);
+ RtcpPacketParser parser;
+ parser.Parse(packet, len);
+ EXPECT_EQ(0, parser.receiver_report()->num_packets());
+ EXPECT_EQ(0, parser.report_block()->num_packets());
+ EXPECT_EQ(1, parser.fir()->num_packets());
+}
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
index a95fddede25..896bd5f4d3a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -18,7 +18,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -57,7 +57,6 @@ RTCPReceiver::RTCPReceiver(const int32_t id, Clock* clock,
_lastIncreasedSequenceNumberMs(0),
stats_callback_(NULL) {
memset(&_remoteSenderInfo, 0, sizeof(_remoteSenderInfo));
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
}
RTCPReceiver::~RTCPReceiver() {
@@ -82,8 +81,6 @@ RTCPReceiver::~RTCPReceiver() {
delete first->second;
_receivedCnameMap.erase(first);
}
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id,
- "%s deleted", __FUNCTION__);
}
void
@@ -178,8 +175,7 @@ int32_t RTCPReceiver::ResetRTT(const uint32_t remoteSSRC) {
RTCPReportBlockInformation* reportBlock =
GetReportBlockInformation(remoteSSRC);
if (reportBlock == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "\tfailed to GetReportBlockInformation(%u)", remoteSSRC);
+ LOG(LS_WARNING) << "Failed to reset rtt for ssrc " << remoteSSRC;
return -1;
}
reportBlock->RTT = 0;
@@ -282,22 +278,14 @@ bool RTCPReceiver::LastReceivedXrReferenceTimeInfo(
return true;
}
-int32_t
-RTCPReceiver::SenderInfoReceived(RTCPSenderInfo* senderInfo) const
-{
- if(senderInfo == NULL)
- {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
- return -1;
- }
- CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
- if(_lastReceivedSRNTPsecs == 0)
- {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s No received SR", __FUNCTION__);
- return -1;
- }
- memcpy(senderInfo, &(_remoteSenderInfo), sizeof(RTCPSenderInfo));
- return 0;
+int32_t RTCPReceiver::SenderInfoReceived(RTCPSenderInfo* senderInfo) const {
+ assert(senderInfo);
+ CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ if (_lastReceivedSRNTPsecs == 0) {
+ return -1;
+ }
+ memcpy(senderInfo, &(_remoteSenderInfo), sizeof(RTCPSenderInfo));
+ return 0;
}
// statistics
@@ -317,6 +305,12 @@ int32_t RTCPReceiver::StatisticsReceived(
return 0;
}
+void RTCPReceiver::GetPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const {
+ CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+ *packet_counter = packet_type_counter_;
+}
+
int32_t
RTCPReceiver::IncomingRTCPPacket(RTCPPacketInformation& rtcpPacketInformation,
RTCPUtility::RTCPParserV2* rtcpParser)
@@ -480,11 +474,12 @@ RTCPReceiver::HandleSenderReceiverReport(RTCPUtility::RTCPParserV2& rtcpParser,
}
// no need for critsect we have _criticalSectionRTCPReceiver
-void
-RTCPReceiver::HandleReportBlock(const RTCPUtility::RTCPPacket& rtcpPacket,
- RTCPPacketInformation& rtcpPacketInformation,
- const uint32_t remoteSSRC,
- const uint8_t numberOfReportBlocks) {
+void RTCPReceiver::HandleReportBlock(
+ const RTCPUtility::RTCPPacket& rtcpPacket,
+ RTCPPacketInformation& rtcpPacketInformation,
+ const uint32_t remoteSSRC,
+ const uint8_t numberOfReportBlocks)
+ EXCLUSIVE_LOCKS_REQUIRED(_criticalSectionRTCPReceiver) {
// This will be called once per report block in the RTCP packet.
// We filter out all report blocks that are not for us.
// Each packet has max 31 RR blocks.
@@ -511,8 +506,8 @@ RTCPReceiver::HandleReportBlock(const RTCPUtility::RTCPPacket& rtcpPacket,
RTCPReportBlockInformation* reportBlock =
CreateReportBlockInformation(remoteSSRC);
if (reportBlock == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "\tfailed to CreateReportBlockInformation(%u)", remoteSSRC);
+ LOG(LS_WARNING) << "Failed to CreateReportBlockInformation("
+ << remoteSSRC << ")";
return;
}
@@ -772,9 +767,6 @@ int32_t RTCPReceiver::BoundingSet(bool &tmmbrOwner, TMMBRSet* boundingSetRec) {
}
RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
if (receiveInfo == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s failed to get RTCPReceiveInformation",
- __FUNCTION__);
return -1;
}
if (receiveInfo->TmmbnBoundingSet.lengthOfSet() > 0) {
@@ -838,6 +830,10 @@ RTCPReceiver::HandleNACK(RTCPUtility::RTCPParserV2& rtcpParser,
HandleNACKItem(rtcpPacket, rtcpPacketInformation);
pktType = rtcpParser.Iterate();
}
+
+ if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpNack) {
+ ++packet_type_counter_.nack_packets;
+ }
}
// no need for critsect we have _criticalSectionRTCPReceiver
@@ -940,7 +936,8 @@ void RTCPReceiver::HandleXrDlrrReportBlock(
void RTCPReceiver::HandleXrDlrrReportBlockItem(
const RTCPUtility::RTCPPacket& packet,
- RTCPPacketInformation& rtcpPacketInformation) {
+ RTCPPacketInformation& rtcpPacketInformation)
+ EXCLUSIVE_LOCKS_REQUIRED(_criticalSectionRTCPReceiver) {
if (registered_ssrcs_.find(packet.XRDLRRReportBlockItem.SSRC) ==
registered_ssrcs_.end()) {
// Not to us.
@@ -1026,6 +1023,7 @@ void RTCPReceiver::HandlePLI(RTCPUtility::RTCPParserV2& rtcpParser,
if (main_ssrc_ == rtcpPacket.PLI.MediaSSRC) {
TRACE_EVENT_INSTANT0("webrtc_rtp", "PLI");
+ ++packet_type_counter_.pli_packets;
// Received a signal that we need to send a new key frame.
rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpPli;
}
@@ -1268,6 +1266,9 @@ void RTCPReceiver::HandleFIRItem(RTCPReceiveInformation* receiveInfo,
if (main_ssrc_ != rtcpPacket.FIRItem.SSRC) {
return;
}
+
+ ++packet_type_counter_.fir_packets;
+
// rtcpPacket.FIR.MediaSSRC SHOULD be 0 but we ignore to check it
// we don't know who this originate from
if (receiveInfo) {
@@ -1332,8 +1333,7 @@ int32_t RTCPReceiver::UpdateTMMBR() {
TMMBRSet* boundingSet = NULL;
numBoundingSet = FindTMMBRBoundingSet(boundingSet);
if (numBoundingSet == -1) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
- "Failed to find TMMBR bounding set.");
+ LOG(LS_WARNING) << "Failed to find TMMBR bounding set.";
return -1;
}
// Set bounding set
@@ -1353,8 +1353,6 @@ int32_t RTCPReceiver::UpdateTMMBR() {
CriticalSectionScoped lock(_criticalSectionFeedbacks);
if (_cbRtcpBandwidthObserver) {
_cbRtcpBandwidthObserver->OnReceivedEstimatedBitrate(bitrate * 1000);
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
- "Set TMMBR request:%d kbps", bitrate);
}
}
return 0;
@@ -1379,9 +1377,6 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
// Process TMMBR and REMB first to avoid multiple callbacks
// to OnNetworkChanged.
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpTmmbr) {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
- "SIG [RTCP] Incoming TMMBR to id:%d", _id);
-
// Might trigger a OnReceivedBandwidthEstimateUpdate.
UpdateTMMBR();
}
@@ -1396,9 +1391,8 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
}
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpNack) {
if (rtcpPacketInformation.nackSequenceNumbers.size() > 0) {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
- "SIG [RTCP] Incoming NACK length:%d",
- rtcpPacketInformation.nackSequenceNumbers.size());
+ LOG(LS_VERBOSE) << "Incoming NACK length: "
+ << rtcpPacketInformation.nackSequenceNumbers.size();
_rtpRtcp.OnReceivedNACK(rtcpPacketInformation.nackSequenceNumbers);
}
}
@@ -1413,13 +1407,11 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
if ((rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli) ||
(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpFir)) {
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli) {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
- "SIG [RTCP] Incoming PLI from SSRC:0x%x",
- rtcpPacketInformation.remoteSSRC);
+ LOG(LS_VERBOSE) << "Incoming PLI from SSRC "
+ << rtcpPacketInformation.remoteSSRC;
} else {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
- "SIG [RTCP] Incoming FIR from SSRC:0x%x",
- rtcpPacketInformation.remoteSSRC);
+ LOG(LS_VERBOSE) << "Incoming FIR from SSRC "
+ << rtcpPacketInformation.remoteSSRC;
}
_cbRtcpIntraFrameObserver->OnReceivedIntraFrameRequest(local_ssrc);
}
@@ -1434,9 +1426,8 @@ void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
}
if (_cbRtcpBandwidthObserver) {
if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRemb) {
- WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
- "SIG [RTCP] Incoming REMB:%d",
- rtcpPacketInformation.receiverEstimatedMaxBitrate);
+ LOG(LS_VERBOSE) << "Incoming REMB: "
+ << rtcpPacketInformation.receiverEstimatedMaxBitrate;
_cbRtcpBandwidthObserver->OnReceivedEstimatedBitrate(
rtcpPacketInformation.receiverEstimatedMaxBitrate);
}
@@ -1532,9 +1523,6 @@ int32_t RTCPReceiver::TMMBRReceived(const uint32_t size,
while (receiveInfoIt != _receivedInfoMap.end()) {
RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
if(receiveInfo == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s failed to get RTCPReceiveInformation",
- __FUNCTION__);
return -1;
}
num += receiveInfo->TmmbrSet.lengthOfSet();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
index 637773dc744..ebffb7cfc9b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver.h
@@ -88,6 +88,8 @@ public:
int32_t StatisticsReceived(
std::vector<RTCPReportBlock>* receiveBlocks) const;
+ void GetPacketTypeCounter(RtcpPacketTypeCounter* packet_counter) const;
+
// Returns true if we haven't received an RTCP RR for several RTCP
// intervals, but only triggers true once.
bool RtcpRrTimeout(int64_t rtcp_interval_ms);
@@ -266,6 +268,8 @@ protected:
int64_t _lastIncreasedSequenceNumberMs;
RtcpStatisticsCallback* stats_callback_;
+
+ RtcpPacketTypeCounter packet_type_counter_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
index 949beb9c7f0..0ca43fa53eb 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_help.h
@@ -12,10 +12,10 @@
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h" // RTCPReportBlock
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/rtp_rtcp/source/tmmbr_help.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
index b3f15bb9662..399c133cd10 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
@@ -261,6 +261,7 @@ class RtcpReceiverTest : public ::testing::Test {
RemoteBitrateEstimatorFactory().Create(
&remote_bitrate_observer_,
&system_clock_,
+ kMimdControl,
kRemoteBitrateEstimatorMinBitrateBps)) {
test_transport_ = new TestTransport();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
index a407a42c913..d73de9c4243 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.cc
@@ -19,7 +19,7 @@
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -156,16 +156,11 @@ RTCPSender::RTCPSender(const int32_t id,
xrSendReceiverReferenceTimeEnabled_(false),
_xrSendVoIPMetric(false),
- _xrVoIPMetric(),
- _nackCount(0),
- _pliCount(0),
- _fullIntraRequestCount(0)
+ _xrVoIPMetric()
{
memset(_CNAME, 0, sizeof(_CNAME));
memset(_lastSendReport, 0, sizeof(_lastSendReport));
memset(_lastRTCPTime, 0, sizeof(_lastRTCPTime));
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
}
RTCPSender::~RTCPSender() {
@@ -190,8 +185,6 @@ RTCPSender::~RTCPSender() {
}
delete _criticalSectionTransport;
delete _criticalSectionRTCPSender;
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
}
int32_t
@@ -239,10 +232,7 @@ RTCPSender::Init()
memset(_lastRTCPTime, 0, sizeof(_lastRTCPTime));
last_xr_rr_.clear();
- _nackCount = 0;
- _pliCount = 0;
- _fullIntraRequestCount = 0;
-
+ memset(&packet_type_counter_, 0, sizeof(packet_type_counter_));
return 0;
}
@@ -354,6 +344,9 @@ RTCPSender::SetREMBData(const uint32_t bitrate,
_rembSSRC[i] = SSRC[i];
}
_sendREMB = true;
+ // Send a REMB immediately if we have a new REMB. The frequency of REMBs is
+ // throttled by the caller.
+ _nextTimeToSendRTCP = _clock->TimeInMilliseconds();
return 0;
}
@@ -430,7 +423,8 @@ RTCPSender::SetCameraDelay(const int32_t delayMS)
CriticalSectionScoped lock(_criticalSectionRTCPSender);
if(delayMS > 1000 || delayMS < -1000)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument, delay can't be larger than 1 sec", __FUNCTION__);
+ LOG(LS_WARNING) << "Delay can't be larger than 1 second: "
+ << delayMS << " ms";
return -1;
}
_cameraDelayMS = delayMS;
@@ -489,14 +483,15 @@ RTCPSender::TimeToSendRTCPReport(const bool sendKeyframeBeforeRTP) const
For audio we use a fix 5 sec interval
For video we use 1 sec interval fo a BW smaller than 360 kbit/s,
- technicaly we break the max 5% RTCP BW for video below 10 kbit/s but that should be extreamly rare
+ technically we break the max 5% RTCP BW for video below 10 kbit/s but
+ that should be extremely rare
From RFC 3550
MAX RTCP BW is 5% if the session BW
A send report is approximately 65 bytes inc CNAME
- A report report is approximately 28 bytes
+ A receiver report is approximately 28 bytes
The RECOMMENDED value for the reduced minimum in seconds is 360
divided by the session bandwidth in kilobits/second. This minimum
@@ -558,7 +553,7 @@ From RFC 3550
now += RTCP_SEND_BEFORE_KEY_FRAME_MS;
}
- if(now > _nextTimeToSendRTCP)
+ if(now >= _nextTimeToSendRTCP)
{
return true;
@@ -616,6 +611,12 @@ bool RTCPSender::SendTimeOfXrRrReport(uint32_t mid_ntp,
return true;
}
+void RTCPSender::GetPacketTypeCounter(
+ RtcpPacketTypeCounter* packet_counter) const {
+ CriticalSectionScoped lock(_criticalSectionRTCPSender);
+ *packet_counter = packet_type_counter_;
+}
+
int32_t RTCPSender::AddExternalReportBlock(
uint32_t SSRC,
const RTCPReportBlock* reportBlock) {
@@ -627,15 +628,10 @@ int32_t RTCPSender::AddReportBlock(
uint32_t SSRC,
std::map<uint32_t, RTCPReportBlock*>* report_blocks,
const RTCPReportBlock* reportBlock) {
- if (reportBlock == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s invalid argument", __FUNCTION__);
- return -1;
- }
+ assert(reportBlock);
if (report_blocks->size() >= RTCP_MAX_REPORT_BLOCKS) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Too many report blocks.";
return -1;
}
std::map<uint32_t, RTCPReportBlock*>::iterator it =
@@ -673,7 +669,7 @@ int32_t RTCPSender::BuildSR(const FeedbackState& feedback_state,
// sanity
if(pos + 52 >= IP_PACKET_SIZE)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build Sender Report.";
return -2;
}
uint32_t RTPtime;
@@ -756,8 +752,7 @@ int32_t RTCPSender::BuildSDEC(uint8_t* rtcpbuffer, int& pos) {
// sanity
if(pos + 12 + lengthCname >= IP_PACKET_SIZE) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build SDEC.";
return -2;
}
// SDEC Source Description
@@ -909,7 +904,9 @@ RTCPSender::BuildExtendedJitterReport(
{
if (external_report_blocks_.size() > 0)
{
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "Not implemented.");
+ // TODO(andresp): Remove external report blocks since they are not
+ // supported.
+ LOG(LS_ERROR) << "Handling of external report blocks not implemented.";
return 0;
}
@@ -1313,7 +1310,7 @@ RTCPSender::BuildTMMBN(uint8_t* rtcpbuffer, int& pos)
// sanity
if(pos + 12 + boundingSet->lengthOfSet()*8 >= IP_PACKET_SIZE)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build TMMBN.";
return -2;
}
uint8_t FMT = 4;
@@ -1380,12 +1377,12 @@ RTCPSender::BuildAPP(uint8_t* rtcpbuffer, int& pos)
// sanity
if(_appData == NULL)
{
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s invalid state", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build app specific.";
return -1;
}
if(pos + 12 + _appLength >= IP_PACKET_SIZE)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build app specific.";
return -2;
}
rtcpbuffer[pos++]=(uint8_t)0x80 + _appSubType;
@@ -1421,7 +1418,7 @@ RTCPSender::BuildNACK(uint8_t* rtcpbuffer,
// sanity
if(pos + 16 >= IP_PACKET_SIZE)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Failed to build NACK.";
return -2;
}
@@ -1474,8 +1471,7 @@ RTCPSender::BuildNACK(uint8_t* rtcpbuffer,
numOfNackFields++;
}
if (i != nackSize) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
- "Nack list to large for one packet.");
+ LOG(LS_WARNING) << "Nack list to large for one packet.";
}
rtcpbuffer[nackSizePos] = static_cast<uint8_t>(2 + numOfNackFields);
*nackString = stringBuilder.GetResult();
@@ -1711,8 +1707,7 @@ int32_t RTCPSender::SendRTCP(const FeedbackState& feedback_state,
CriticalSectionScoped lock(_criticalSectionRTCPSender);
if(_method == kRtcpOff)
{
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
- "%s invalid state", __FUNCTION__);
+ LOG(LS_WARNING) << "Can't send rtcp if it is disabled.";
return -1;
}
}
@@ -1778,10 +1773,9 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
rtcpPacketTypeFlags |= kRtcpTmmbn;
_sendTMMBN = false;
}
- if (xrSendReceiverReferenceTimeEnabled_ &&
- (rtcpPacketTypeFlags & kRtcpReport))
+ if (rtcpPacketTypeFlags & kRtcpReport)
{
- if (!_sending)
+ if (xrSendReceiverReferenceTimeEnabled_ && !_sending)
{
rtcpPacketTypeFlags |= kRtcpXrReceiverReferenceTime;
}
@@ -1920,8 +1914,9 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
return position;
}
TRACE_EVENT_INSTANT0("webrtc_rtp", "RTCPSender::PLI");
- _pliCount++;
- TRACE_COUNTER_ID1("webrtc_rtp", "RTCP_PLICount", _SSRC, _pliCount);
+ ++packet_type_counter_.pli_packets;
+ TRACE_COUNTER_ID1("webrtc_rtp", "RTCP_PLICount", _SSRC,
+ packet_type_counter_.pli_packets);
}
if(rtcpPacketTypeFlags & kRtcpFir)
{
@@ -1932,9 +1927,9 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
return position;
}
TRACE_EVENT_INSTANT0("webrtc_rtp", "RTCPSender::FIR");
- _fullIntraRequestCount++;
+ ++packet_type_counter_.fir_packets;
TRACE_COUNTER_ID1("webrtc_rtp", "RTCP_FIRCount", _SSRC,
- _fullIntraRequestCount);
+ packet_type_counter_.fir_packets);
}
if(rtcpPacketTypeFlags & kRtcpSli)
{
@@ -2017,8 +2012,9 @@ int RTCPSender::PrepareRTCP(const FeedbackState& feedback_state,
}
TRACE_EVENT_INSTANT1("webrtc_rtp", "RTCPSender::NACK",
"nacks", TRACE_STR_COPY(nackString.c_str()));
- _nackCount++;
- TRACE_COUNTER_ID1("webrtc_rtp", "RTCP_NACKCount", _SSRC, _nackCount);
+ ++packet_type_counter_.nack_packets;
+ TRACE_COUNTER_ID1("webrtc_rtp", "RTCP_NACKCount", _SSRC,
+ packet_type_counter_.nack_packets);
}
if(rtcpPacketTypeFlags & kRtcpXrVoipMetric)
{
@@ -2065,7 +2061,7 @@ bool RTCPSender::PrepareReport(const FeedbackState& feedback_state,
RTCPReportBlock* report_block,
uint32_t* ntp_secs, uint32_t* ntp_frac) {
// Do we have receive statistics to send?
- StreamStatistician::Statistics stats;
+ RtcpStatistics stats;
if (!statistician->GetStatistics(&stats, true))
return false;
report_block->fractionLost = stats.fraction_lost;
@@ -2123,13 +2119,7 @@ int32_t
RTCPSender::SetCSRCs(const uint32_t arrOfCSRC[kRtpCsrcSize],
const uint8_t arrLength)
{
- if(arrLength > kRtpCsrcSize)
- {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
- assert(false);
- return -1;
- }
-
+ assert(arrLength <= kRtpCsrcSize);
CriticalSectionScoped lock(_criticalSectionRTCPSender);
for(int i = 0; i < arrLength;i++)
@@ -2148,7 +2138,7 @@ RTCPSender::SetApplicationSpecificData(const uint8_t subType,
{
if(length %4 != 0)
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+ LOG(LS_ERROR) << "Failed to SetApplicationSpecificData.";
return -1;
}
CriticalSectionScoped lock(_criticalSectionRTCPSender);
@@ -2194,17 +2184,10 @@ int32_t RTCPSender::WriteAllReportBlocksToBuffer(
uint8_t& numberOfReportBlocks,
const uint32_t NTPsec,
const uint32_t NTPfrac) {
- // sanity one block
- if(pos + 24 >= IP_PACKET_SIZE) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s invalid argument", __FUNCTION__);
- return -1;
- }
numberOfReportBlocks = external_report_blocks_.size();
numberOfReportBlocks += internal_report_blocks_.size();
if ((pos + numberOfReportBlocks * 24) >= IP_PACKET_SIZE) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "%s invalid argument", __FUNCTION__);
+ LOG(LS_WARNING) << "Can't fit all report blocks.";
return -1;
}
pos = WriteReportBlocksToBuffer(rtcpbuffer, pos, internal_report_blocks_);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
index 9ed58244438..cbbc32aac6a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender.h
@@ -180,6 +180,8 @@ public:
void SetTargetBitrate(unsigned int target_bitrate);
+ void GetPacketTypeCounter(RtcpPacketTypeCounter* packet_counter) const;
+
private:
int32_t SendToNetwork(const uint8_t* dataBuffer, const uint16_t length);
@@ -342,10 +344,7 @@ private:
bool _xrSendVoIPMetric;
RTCPVoIPMetric _xrVoIPMetric;
- // Counters
- uint32_t _nackCount;
- uint32_t _pliCount;
- uint32_t _fullIntraRequestCount;
+ RtcpPacketTypeCounter packet_type_counter_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
index a8b5275fa20..dfb655c5167 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
@@ -278,12 +278,13 @@ class RtcpSenderTest : public ::testing::Test {
: over_use_detector_options_(),
clock_(1335900000),
rtp_payload_registry_(new RTPPayloadRegistry(
- 0, RTPPayloadStrategy::CreateStrategy(false))),
+ RTPPayloadStrategy::CreateStrategy(false))),
remote_bitrate_observer_(),
remote_bitrate_estimator_(
RemoteBitrateEstimatorFactory().Create(
&remote_bitrate_observer_,
&clock_,
+ kMimdControl,
kRemoteBitrateEstimatorMinBitrateBps)),
receive_statistics_(ReceiveStatistics::Create(&clock_)) {
test_transport_ = new TestTransport();
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
index 705a38b0161..9acab735e50 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtcp_utility.cc
@@ -1266,31 +1266,27 @@ RTCPUtility::RTCPParserV2::ParseFBCommon(const RTCPCommonHeader& header)
}
}
-bool
-RTCPUtility::RTCPParserV2::ParseRPSIItem()
-{
- // RFC 4585 6.3.3. Reference Picture Selection Indication (RPSI)
- /*
- 0 1 2 3
- 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- | PB |0| Payload Type| Native RPSI bit string |
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- | defined per codec ... | Padding (0) |
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
+bool RTCPUtility::RTCPParserV2::ParseRPSIItem() {
+
+ // RFC 4585 6.3.3. Reference Picture Selection Indication (RPSI).
+ //
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | PB |0| Payload Type| Native RPSI bit string |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | defined per codec ... | Padding (0) |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
- if (length < 4)
- {
+ if (length < 4) {
_state = State_TopLevel;
EndCurrentBlock();
return false;
}
- if(length > 2+RTCP_RPSI_DATA_SIZE)
- {
+ if (length > 2 + RTCP_RPSI_DATA_SIZE) {
_state = State_TopLevel;
EndCurrentBlock();
@@ -1299,12 +1295,14 @@ RTCPUtility::RTCPParserV2::ParseRPSIItem()
_packetType = kRtcpPsfbRpsiCode;
- uint8_t paddingBits = *_ptrRTCPData++;
+ uint8_t padding_bits = *_ptrRTCPData++;
_packet.RPSI.PayloadType = *_ptrRTCPData++;
- memcpy(_packet.RPSI.NativeBitString, _ptrRTCPData, length-2);
+ memcpy(_packet.RPSI.NativeBitString, _ptrRTCPData, length - 2);
+ _ptrRTCPData += length - 2;
- _packet.RPSI.NumberOfValidBits = uint16_t(length-2)*8 - paddingBits;
+ _packet.RPSI.NumberOfValidBits =
+ static_cast<uint16_t>(length - 2) * 8 - padding_bits;
return true;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc
index 904156e9445..fa847625ead 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_fec_unittest.cc
@@ -41,7 +41,7 @@ template <typename T> void ClearList(std::list<T*>* my_list) {
class RtpFecTest : public ::testing::Test {
protected:
RtpFecTest()
- : fec_(new ForwardErrorCorrection(0)), ssrc_(rand()), fec_seq_num_(0) {}
+ : fec_(new ForwardErrorCorrection()), ssrc_(rand()), fec_seq_num_(0) {}
ForwardErrorCorrection* fec_;
int ssrc_;
@@ -86,43 +86,6 @@ class RtpFecTest : public ::testing::Test {
void TearDown();
};
-// TODO(marpan): Consider adding table for input/output to simplify tests.
-
-TEST_F(RtpFecTest, HandleIncorrectInputs) {
- int kNumImportantPackets = 0;
- bool kUseUnequalProtection = false;
- uint8_t kProtectionFactor = 60;
-
- // Media packet list is empty.
- EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
- kNumImportantPackets, kUseUnequalProtection,
- webrtc::kFecMaskBursty, &fec_packet_list_));
-
- int num_media_packets = 10;
- ConstructMediaPackets(num_media_packets);
-
- kNumImportantPackets = -1;
- // Number of important packets below 0.
- EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
- kNumImportantPackets, kUseUnequalProtection,
- webrtc::kFecMaskBursty, &fec_packet_list_));
-
- kNumImportantPackets = 12;
- // Number of important packets greater than number of media packets.
- EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
- kNumImportantPackets, kUseUnequalProtection,
- webrtc::kFecMaskBursty, &fec_packet_list_));
-
- num_media_packets = kMaxNumberMediaPackets + 1;
- ConstructMediaPackets(num_media_packets);
-
- kNumImportantPackets = 0;
- // Number of media packet is above maximum allowed (kMaxNumberMediaPackets).
- EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_, kProtectionFactor,
- kNumImportantPackets, kUseUnequalProtection,
- webrtc::kFecMaskBursty, &fec_packet_list_));
-}
-
TEST_F(RtpFecTest, FecRecoveryNoLoss) {
const int kNumImportantPackets = 0;
const bool kUseUnequalProtection = false;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h
index 650c0fad5e5..e4d3dc08362 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h
@@ -28,8 +28,8 @@
#include <queue>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
index 13eb0e101c7..e146492c72f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
@@ -18,9 +18,9 @@
#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
index fcc7587c1fc..edffe8aecb6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_extension.h
@@ -22,7 +22,7 @@ const uint16_t kRtpOneByteHeaderExtensionId = 0xBEDE;
const size_t kRtpOneByteHeaderLength = 4;
const size_t kTransmissionTimeOffsetLength = 4;
-const size_t kAudioLevelLength = 2;
+const size_t kAudioLevelLength = 4;
const size_t kAbsoluteSendTimeLength = 4;
struct HeaderExtension {
@@ -37,11 +37,7 @@ struct HeaderExtension {
length = kTransmissionTimeOffsetLength;
break;
case kRtpExtensionAudioLevel:
- // TODO(solenberg): Because of how the audio level extension is handled
- // in RTPSenderAudio::SendAudio(), we cannot set the actual length here
- // but must leave it at zero. The consequence is that any other header
- // extensions registered for an audio channel are effectively ignored.
- // length = kAudioLevelLength;
+ length = kAudioLevelLength;
break;
case kRtpExtensionAbsoluteSendTime:
length = kAbsoluteSendTimeLength;
@@ -66,6 +62,8 @@ class RtpHeaderExtensionMap {
int32_t Deregister(const RTPExtensionType type);
+ bool IsRegistered(RTPExtensionType type) const;
+
int32_t GetType(const uint8_t id, RTPExtensionType* type) const;
int32_t GetId(const RTPExtensionType type, uint8_t* id) const;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
index d04872582b5..bb24d4dbfb7 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_header_parser.cc
@@ -13,7 +13,6 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@@ -60,8 +59,6 @@ bool RtpHeaderParserImpl::Parse(const uint8_t* packet, int length,
const bool valid_rtpheader = rtp_parser.Parse(*header, &map);
if (!valid_rtpheader) {
- WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, -1,
- "IncomingPacket invalid RTP header");
return false;
}
return true;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
index 14ca821f7e5..e3515f44543 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc
@@ -18,7 +18,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -33,13 +33,21 @@ RTPPacketHistory::RTPPacketHistory(Clock* clock)
}
RTPPacketHistory::~RTPPacketHistory() {
- Free();
+ {
+ CriticalSectionScoped cs(critsect_);
+ Free();
+ }
delete critsect_;
}
-void RTPPacketHistory::SetStorePacketsStatus(bool enable,
+void RTPPacketHistory::SetStorePacketsStatus(bool enable,
uint16_t number_to_store) {
+ CriticalSectionScoped cs(critsect_);
if (enable) {
+ if (store_) {
+ LOG(LS_WARNING) << "Purging packet history in order to re-set status.";
+ Free();
+ }
Allocate(number_to_store);
} else {
Free();
@@ -48,13 +56,7 @@ void RTPPacketHistory::SetStorePacketsStatus(bool enable,
void RTPPacketHistory::Allocate(uint16_t number_to_store) {
assert(number_to_store > 0);
- CriticalSectionScoped cs(critsect_);
- if (store_) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "SetStorePacketsStatus already set, number: %d", number_to_store);
- return;
- }
-
+ assert(!store_);
store_ = true;
stored_packets_.resize(number_to_store);
stored_seq_nums_.resize(number_to_store);
@@ -65,13 +67,12 @@ void RTPPacketHistory::Allocate(uint16_t number_to_store) {
}
void RTPPacketHistory::Free() {
- CriticalSectionScoped cs(critsect_);
if (!store_) {
return;
}
std::vector<std::vector<uint8_t> >::iterator it;
- for (it = stored_packets_.begin(); it != stored_packets_.end(); ++it) {
+ for (it = stored_packets_.begin(); it != stored_packets_.end(); ++it) {
it->clear();
}
@@ -130,8 +131,8 @@ int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
VerifyAndAllocatePacketLength(max_packet_length);
if (packet_length > max_packet_length_) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, -1,
- "Failed to store RTP packet, length: %d", packet_length);
+ LOG(LS_WARNING) << "Failed to store RTP packet with length: "
+ << packet_length;
return -1;
}
@@ -156,46 +157,6 @@ int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
return 0;
}
-int32_t RTPPacketHistory::ReplaceRTPHeader(const uint8_t* packet,
- uint16_t sequence_number,
- uint16_t rtp_header_length) {
- CriticalSectionScoped cs(critsect_);
- if (!store_) {
- return 0;
- }
-
- assert(packet);
- assert(rtp_header_length > 3);
-
- if (rtp_header_length > max_packet_length_) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "Failed to replace RTP packet, length: %d", rtp_header_length);
- return -1;
- }
-
- int32_t index = 0;
- bool found = FindSeqNum(sequence_number, &index);
- if (!found) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "No match for getting seqNum %u", sequence_number);
- return -1;
- }
-
- uint16_t length = stored_lengths_.at(index);
- if (length == 0 || length > max_packet_length_) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "No match for getting seqNum %u, len %d", sequence_number, length);
- return -1;
- }
- assert(stored_seq_nums_[index] == sequence_number);
-
- // Update RTP header.
- std::vector<std::vector<uint8_t> >::iterator it =
- stored_packets_.begin() + index;
- std::copy(packet, packet + rtp_header_length, it->begin());
- return 0;
-}
-
bool RTPPacketHistory::HasRTPPacket(uint16_t sequence_number) const {
CriticalSectionScoped cs(critsect_);
if (!store_) {
@@ -207,7 +168,7 @@ bool RTPPacketHistory::HasRTPPacket(uint16_t sequence_number) const {
if (!found) {
return false;
}
-
+
uint16_t length = stored_lengths_.at(index);
if (length == 0 || length > max_packet_length_) {
// Invalid length.
@@ -222,6 +183,7 @@ bool RTPPacketHistory::GetPacketAndSetSendTime(uint16_t sequence_number,
uint8_t* packet,
uint16_t* packet_length,
int64_t* stored_time_ms) {
+ assert(*packet_length >= max_packet_length_);
CriticalSectionScoped cs(critsect_);
if (!store_) {
return false;
@@ -230,30 +192,22 @@ bool RTPPacketHistory::GetPacketAndSetSendTime(uint16_t sequence_number,
int32_t index = 0;
bool found = FindSeqNum(sequence_number, &index);
if (!found) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "No match for getting seqNum %u", sequence_number);
+ LOG(LS_WARNING) << "No match for getting seqNum " << sequence_number;
return false;
}
uint16_t length = stored_lengths_.at(index);
- if (length == 0 || length > max_packet_length_) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "No match for getting seqNum %u, len %d", sequence_number, length);
- return false;
- }
-
- if (length > *packet_length) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "Input buffer too short for packet %u", sequence_number);
+ assert(length <= max_packet_length_);
+ if (length == 0) {
+ LOG(LS_WARNING) << "No match for getting seqNum " << sequence_number
+ << ", len " << length;
return false;
}
- // Verify elapsed time since last retrieve.
+ // Verify elapsed time since last retrieve.
int64_t now = clock_->TimeInMilliseconds();
if (min_elapsed_time_ms > 0 &&
((now - stored_send_times_.at(index)) < min_elapsed_time_ms)) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "Skip getting packet %u, packet recently resent.", sequence_number);
return false;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
index 785e4992b0a..190e5057bc9 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history.h
@@ -18,6 +18,7 @@
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/typedefs.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
namespace webrtc {
@@ -40,14 +41,6 @@ class RTPPacketHistory {
int64_t capture_time_ms,
StorageType type);
- // Replaces the stored RTP packet with matching sequence number with the
- // RTP header of the provided packet.
- // Note: Calling this function assumes that the RTP header length should not
- // have changed since the packet was stored.
- int32_t ReplaceRTPHeader(const uint8_t* packet,
- uint16_t sequence_number,
- uint16_t rtp_header_length);
-
// Gets stored RTP packet corresponding to the input sequence number.
// The packet is copied to the buffer pointed to by ptr_rtp_packet.
// The rtp_packet_length should show the available buffer size.
@@ -74,8 +67,8 @@ class RTPPacketHistory {
private:
void GetPacket(int index, uint8_t* packet, uint16_t* packet_length,
int64_t* stored_time_ms) const;
- void Allocate(uint16_t number_to_store);
- void Free();
+ void Allocate(uint16_t number_to_store) EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
+ void Free() EXCLUSIVE_LOCKS_REQUIRED(*critsect_);
void VerifyAndAllocatePacketLength(uint16_t packet_length);
bool FindSeqNum(uint16_t sequence_number, int32_t* index) const;
int FindBestFittingPacket(uint16_t size) const;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
index 1682b7c3387..7eb22ff69db 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
@@ -103,19 +103,6 @@ TEST_F(RtpPacketHistoryTest, PutRtpPacket_TooLargePacketLength) {
kAllowRetransmission));
}
-TEST_F(RtpPacketHistoryTest, GetRtpPacket_TooSmallBuffer) {
- hist_->SetStorePacketsStatus(true, 10);
- uint16_t len = 0;
- int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
- CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
- EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
- capture_time_ms, kAllowRetransmission));
- uint16_t len_out = len - 1;
- int64_t time;
- EXPECT_FALSE(hist_->GetPacketAndSetSendTime(kSeqNum, 0, false, packet_,
- &len_out, &time));
-}
-
TEST_F(RtpPacketHistoryTest, GetRtpPacket_NotStored) {
hist_->SetStorePacketsStatus(true, 10);
uint16_t len = kMaxPacketLength;
@@ -155,42 +142,6 @@ TEST_F(RtpPacketHistoryTest, GetRtpPacket) {
}
}
-TEST_F(RtpPacketHistoryTest, ReplaceRtpHeader) {
- hist_->SetStorePacketsStatus(true, 10);
-
- uint16_t len = 0;
- int64_t capture_time_ms = 1;
- CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
- // Replace should fail, packet is not stored.
- EXPECT_EQ(-1, hist_->ReplaceRTPHeader(packet_, kSeqNum, len));
- EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
- capture_time_ms, kAllowRetransmission));
-
- // Create modified packet and replace.
- len = 0;
- CreateRtpPacket(kSeqNum, kSsrc + 1, kPayload + 2, kTimestamp, packet_, &len);
- EXPECT_EQ(0, hist_->ReplaceRTPHeader(packet_, kSeqNum, len));
-
- uint16_t len_out = kMaxPacketLength;
- int64_t time;
- EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 0, false, packet_out_,
- &len_out, &time));
- EXPECT_EQ(len, len_out);
- EXPECT_EQ(capture_time_ms, time);
- for (int i = 0; i < len; i++) {
- EXPECT_EQ(packet_[i], packet_out_[i]);
- }
-
- // Replace should fail, too large length.
- EXPECT_EQ(-1, hist_->ReplaceRTPHeader(packet_, kSeqNum,
- kMaxPacketLength + 1));
-
- // Replace should fail, packet is not stored.
- len = 0;
- CreateRtpPacket(kSeqNum + 1, kSsrc, kPayload, kTimestamp, packet_, &len);
- EXPECT_EQ(-1, hist_->ReplaceRTPHeader(packet_, kSeqNum + 1, len));
-}
-
TEST_F(RtpPacketHistoryTest, NoCaptureTime) {
hist_->SetStorePacketsStatus(true, 10);
uint16_t len = 0;
@@ -236,10 +187,10 @@ TEST_F(RtpPacketHistoryTest, MinResendTime) {
capture_time_ms, kAllowRetransmission));
int64_t time;
+ len = kMaxPacketLength;
EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 100, false, packet_, &len,
&time));
fake_clock_.AdvanceTimeMilliseconds(100);
-
// Time has elapsed.
len = kMaxPacketLength;
EXPECT_TRUE(hist_->GetPacketAndSetSendTime(kSeqNum, 100, false, packet_, &len,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
index 1c3b990c5de..db2e4cd31da 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.cc
@@ -10,15 +10,13 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
RTPPayloadRegistry::RTPPayloadRegistry(
- const int32_t id,
RTPPayloadStrategy* rtp_payload_strategy)
: crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- id_(id),
rtp_payload_strategy_(rtp_payload_strategy),
red_payload_type_(-1),
ulpfec_payload_type_(-1),
@@ -60,9 +58,8 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
case 77: // 205 Transport layer FB message.
case 78: // 206 Payload-specific FB message.
case 79: // 207 Extended report.
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s invalid payloadtype:%d",
- __FUNCTION__, payload_type);
+ LOG(LS_ERROR) << "Can't register invalid receiver payload type: "
+ << payload_type;
return -1;
default:
break;
@@ -94,9 +91,7 @@ int32_t RTPPayloadRegistry::RegisterReceivePayload(
return 0;
}
}
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s invalid argument payload_type:%d already registered",
- __FUNCTION__, payload_type);
+ LOG(LS_ERROR) << "Payload type already registered: " << payload_type;
return -1;
}
@@ -138,14 +133,8 @@ int32_t RTPPayloadRegistry::DeRegisterReceivePayload(
const int8_t payload_type) {
CriticalSectionScoped cs(crit_sect_.get());
ModuleRTPUtility::PayloadTypeMap::iterator it =
- payload_type_map_.find(payload_type);
-
- if (it == payload_type_map_.end()) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s failed to find payload_type:%d",
- __FUNCTION__, payload_type);
- return -1;
- }
+ payload_type_map_.find(payload_type);
+ assert(it != payload_type_map_.end());
delete it->second;
payload_type_map_.erase(it);
return 0;
@@ -194,11 +183,7 @@ int32_t RTPPayloadRegistry::ReceivePayloadType(
const uint8_t channels,
const uint32_t rate,
int8_t* payload_type) const {
- if (payload_type == NULL) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s invalid argument", __FUNCTION__);
- return -1;
- }
+ assert(payload_type);
size_t payload_name_length = strlen(payload_name);
CriticalSectionScoped cs(crit_sect_.get());
@@ -243,12 +228,6 @@ int32_t RTPPayloadRegistry::ReceivePayloadType(
return -1;
}
-void RTPPayloadRegistry::SetRtxStatus(bool enable, uint32_t ssrc) {
- CriticalSectionScoped cs(crit_sect_.get());
- rtx_ = enable;
- ssrc_rtx_ = ssrc;
-}
-
bool RTPPayloadRegistry::RtxEnabled() const {
CriticalSectionScoped cs(crit_sect_.get());
return rtx_;
@@ -296,17 +275,24 @@ bool RTPPayloadRegistry::RestoreOriginalPacket(uint8_t** restored_packet,
(*restored_packet)[1] |= kRtpMarkerBitMask; // Marker bit is set.
}
} else {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Incorrect RTX configuration, dropping packet.");
+ LOG(LS_WARNING) << "Incorrect RTX configuration, dropping packet.";
return false;
}
}
return true;
}
+void RTPPayloadRegistry::SetRtxSsrc(uint32_t ssrc) {
+ CriticalSectionScoped cs(crit_sect_.get());
+ ssrc_rtx_ = ssrc;
+ rtx_ = true;
+}
+
void RTPPayloadRegistry::SetRtxPayloadType(int payload_type) {
CriticalSectionScoped cs(crit_sect_.get());
+ assert(payload_type >= 0);
payload_type_rtx_ = payload_type;
+ rtx_ = true;
}
bool RTPPayloadRegistry::IsRed(const RTPHeader& header) const {
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
index 96fa80ad842..c03ffcd1f3f 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_payload_registry_unittest.cc
@@ -32,8 +32,7 @@ class RtpPayloadRegistryTest : public ::testing::Test {
void SetUp() {
// Note: the payload registry takes ownership of the strategy.
mock_payload_strategy_ = new testing::NiceMock<MockRTPPayloadStrategy>();
- rtp_payload_registry_.reset(
- new RTPPayloadRegistry(123, mock_payload_strategy_));
+ rtp_payload_registry_.reset(new RTPPayloadRegistry(mock_payload_strategy_));
}
protected:
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
index 134548518ea..c8104cc3731 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_audio.cc
@@ -15,7 +15,7 @@
#include <string.h> // memcpy()
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -277,11 +277,8 @@ int32_t RTPReceiverAudio::InvokeOnInitializeDecoder(
specific_payload.Audio.frequency,
specific_payload.Audio.channels,
specific_payload.Audio.rate)) {
- WEBRTC_TRACE(kTraceError,
- kTraceRtpRtcp,
- id,
- "Failed to create video decoder for payload type:%d",
- payload_type);
+ LOG(LS_ERROR) << "Failed to create decoder for payload type: "
+ << payload_name << "/" << payload_type;
return -1;
}
return 0;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
index 9a276819ab8..d92618f2d57 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_impl.cc
@@ -18,7 +18,7 @@
#include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -39,7 +39,7 @@ RtpReceiver* RtpReceiver::CreateVideoReceiver(
return new RtpReceiverImpl(
id, clock, NullObjectRtpAudioFeedback(), incoming_messages_callback,
rtp_payload_registry,
- RTPReceiverStrategy::CreateVideoStrategy(id, incoming_payload_callback));
+ RTPReceiverStrategy::CreateVideoStrategy(incoming_payload_callback));
}
RtpReceiver* RtpReceiver::CreateAudioReceiver(
@@ -87,8 +87,6 @@ RtpReceiverImpl::RtpReceiverImpl(int32_t id,
assert(incoming_messages_callback);
memset(current_remote_csrc_, 0, sizeof(current_remote_csrc_));
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
}
RtpReceiverImpl::~RtpReceiverImpl() {
@@ -96,7 +94,6 @@ RtpReceiverImpl::~RtpReceiverImpl() {
cb_rtp_feedback_->OnIncomingCSRCChanged(id_, current_remote_csrc_[i],
false);
}
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id_, "%s deleted", __FUNCTION__);
}
RTPReceiverStrategy* RtpReceiverImpl::GetMediaReceiver() const {
@@ -127,9 +124,8 @@ int32_t RtpReceiverImpl::RegisterReceivePayload(
if (created_new_payload) {
if (rtp_media_receiver_->OnNewPayloadTypeCreated(payload_name, payload_type,
frequency) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s failed to register payload",
- __FUNCTION__);
+ LOG(LS_ERROR) << "Failed to register payload: " << payload_name << "/"
+ << payload_type;
return -1;
}
}
@@ -182,19 +178,12 @@ bool RtpReceiverImpl::IncomingRtpPacket(
PayloadUnion payload_specific,
bool in_order) {
// Sanity check.
- if (payload_length < 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s invalid argument",
- __FUNCTION__);
- return false;
- }
- int8_t first_payload_byte = 0;
- if (payload_length > 0) {
- first_payload_byte = payload[0];
- }
+ assert(payload_length >= 0);
+
// Trigger our callbacks.
CheckSSRCChanged(rtp_header);
+ int8_t first_payload_byte = payload_length > 0 ? payload[0] : 0;
bool is_red = false;
bool should_reset_statistics = false;
@@ -205,14 +194,9 @@ bool RtpReceiverImpl::IncomingRtpPacket(
&should_reset_statistics) == -1) {
if (payload_length == 0) {
// OK, keep-alive packet.
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "%s received keepalive",
- __FUNCTION__);
return true;
}
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "%s received invalid payloadtype",
- __FUNCTION__);
+ LOG(LS_WARNING) << "Receiving invalid payload type.";
return false;
}
@@ -347,9 +331,8 @@ void RtpReceiverImpl::CheckSSRCChanged(const RTPHeader& rtp_header) {
id_, rtp_header.payloadType, payload_name,
rtp_header.payload_type_frequency, channels, rate)) {
// New stream, same codec.
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "Failed to create decoder for payload type:%d",
- rtp_header.payloadType);
+ LOG(LS_ERROR) << "Failed to create decoder for payload type: "
+ << rtp_header.payloadType;
}
}
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
index d8a22579621..09c9b6fc300 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h
@@ -26,8 +26,7 @@ class TelephoneEventHandler;
// This class is not thread-safe and must be protected by its caller.
class RTPReceiverStrategy {
public:
- static RTPReceiverStrategy* CreateVideoStrategy(int32_t id,
- RtpData* data_callback);
+ static RTPReceiverStrategy* CreateVideoStrategy(RtpData* data_callback);
static RTPReceiverStrategy* CreateAudioStrategy(
int32_t id, RtpData* data_callback,
RtpAudioFeedback* incoming_messages_callback);
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
index b733cdb4b05..5bb519f6220 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -17,19 +17,18 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
RTPReceiverStrategy* RTPReceiverStrategy::CreateVideoStrategy(
- int32_t id, RtpData* data_callback) {
- return new RTPReceiverVideo(id, data_callback);
+ RtpData* data_callback) {
+ return new RTPReceiverVideo(data_callback);
}
-RTPReceiverVideo::RTPReceiverVideo(int32_t id, RtpData* data_callback)
- : RTPReceiverStrategy(data_callback),
- id_(id) {}
+RTPReceiverVideo::RTPReceiverVideo(RtpData* data_callback)
+ : RTPReceiverStrategy(data_callback) {}
RTPReceiverVideo::~RTPReceiverVideo() {
}
@@ -93,11 +92,8 @@ int32_t RTPReceiverVideo::InvokeOnInitializeDecoder(
// For video we just go with default values.
if (-1 == callback->OnInitializeDecoder(
id, payload_type, payload_name, kVideoPayloadTypeFrequency, 1, 0)) {
- WEBRTC_TRACE(kTraceError,
- kTraceRtpRtcp,
- id,
- "Failed to create video decoder for payload type:%d",
- payload_type);
+ LOG(LS_ERROR) << "Failed to created decoder for payload type: "
+ << payload_type;
return -1;
}
return 0;
@@ -111,13 +107,6 @@ int32_t RTPReceiverVideo::ParseVideoCodecSpecific(
RtpVideoCodecTypes video_type,
int64_t now_ms,
bool is_first_packet) {
- WEBRTC_TRACE(kTraceStream,
- kTraceRtpRtcp,
- id_,
- "%s(timestamp:%u)",
- __FUNCTION__,
- rtp_header->header.timestamp);
-
switch (rtp_header->type.Video.codec) {
case kRtpVideoGeneric:
rtp_header->type.Video.isFirstPacket = is_first_packet;
@@ -170,13 +159,8 @@ int32_t RTPReceiverVideo::ReceiveVp8Codec(WebRtcRTPHeader* rtp_header,
const uint8_t* payload_data,
uint16_t payload_data_length) {
ModuleRTPUtility::RTPPayload parsed_packet;
- uint32_t id;
- {
- CriticalSectionScoped cs(crit_sect_.get());
- id = id_;
- }
ModuleRTPUtility::RTPPayloadParser rtp_payload_parser(
- kRtpVideoVp8, payload_data, payload_data_length, id);
+ kRtpVideoVp8, payload_data, payload_data_length);
if (!rtp_payload_parser.Parse(parsed_packet))
return -1;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
index ab69b40ee6f..4d81cb3972e 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_receiver_video.h
@@ -22,7 +22,7 @@ namespace webrtc {
class RTPReceiverVideo : public RTPReceiverStrategy {
public:
- RTPReceiverVideo(const int32_t id, RtpData* data_callback);
+ RTPReceiverVideo(RtpData* data_callback);
virtual ~RTPReceiverVideo();
@@ -80,8 +80,6 @@ class RTPReceiverVideo : public RTPReceiverStrategy {
RtpVideoCodecTypes video_type,
int64_t now_ms,
bool is_first_packet);
-
- int32_t id_;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi
index 070845bc795..dcd65988058 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp.gypi
@@ -20,6 +20,7 @@
# Common
'../interface/fec_receiver.h',
'../interface/receive_statistics.h',
+ '../interface/remote_ntp_time_estimator.h',
'../interface/rtp_header_parser.h',
'../interface/rtp_payload_registry.h',
'../interface/rtp_receiver.h',
@@ -32,10 +33,13 @@
'fec_receiver_impl.h',
'receive_statistics_impl.cc',
'receive_statistics_impl.h',
+ 'remote_ntp_time_estimator.cc',
'rtp_header_parser.cc',
'rtp_rtcp_config.h',
'rtp_rtcp_impl.cc',
'rtp_rtcp_impl.h',
+ 'rtcp_packet.cc',
+ 'rtcp_packet.h',
'rtcp_receiver.cc',
'rtcp_receiver.h',
'rtcp_receiver_help.cc',
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
index 89e9eb294f8..469a41e1d36 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -17,11 +17,6 @@
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
-#ifdef MATLAB
-#include "webrtc/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h"
-extern MatlabEngine eng; // Global variable defined elsewhere.
-#endif
-
#ifdef _WIN32
// Disable warning C4355: 'this' : used in base member initializer list.
#pragma warning(disable : 4355)
@@ -66,7 +61,9 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
configuration.outgoing_transport,
configuration.audio_messages,
configuration.paced_sender),
- rtcp_sender_(configuration.id, configuration.audio, configuration.clock,
+ rtcp_sender_(configuration.id,
+ configuration.audio,
+ configuration.clock,
configuration.receive_statistics),
rtcp_receiver_(configuration.id, configuration.clock, this),
clock_(configuration.clock),
@@ -83,15 +80,13 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
CriticalSectionWrapper::CreateCriticalSection()),
default_module_(
static_cast<ModuleRtpRtcpImpl*>(configuration.default_module)),
+ padding_index_(-1), // Start padding at the first child module.
nack_method_(kNackOff),
nack_last_time_sent_full_(0),
nack_last_seq_number_sent_(0),
simulcast_(false),
key_frame_req_method_(kKeyFrameReqFirRtp),
remote_bitrate_(configuration.remote_bitrate_estimator),
-#ifdef MATLAB
- , plot1_(NULL),
-#endif
rtt_stats_(configuration.rtt_stats),
critical_section_rtt_(CriticalSectionWrapper::CreateCriticalSection()),
rtt_ms_(0) {
@@ -110,13 +105,9 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
uint32_t SSRC = rtp_sender_.SSRC();
rtcp_sender_.SetSSRC(SSRC);
SetRtcpReceiverSsrcs(SSRC);
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id_, "%s created", __FUNCTION__);
}
ModuleRtpRtcpImpl::~ModuleRtpRtcpImpl() {
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id_, "%s deleted", __FUNCTION__);
-
// All child modules MUST be deleted before deleting the default.
assert(child_modules_.empty());
@@ -125,21 +116,9 @@ ModuleRtpRtcpImpl::~ModuleRtpRtcpImpl() {
if (default_module_) {
default_module_->DeRegisterChildModule(this);
}
-#ifdef MATLAB
- if (plot1_) {
- eng.DeletePlot(plot1_);
- plot1_ = NULL;
- }
-#endif
}
void ModuleRtpRtcpImpl::RegisterChildModule(RtpRtcp* module) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "RegisterChildModule(module:0x%x)",
- module);
-
CriticalSectionScoped lock(
critical_section_module_ptrs_.get());
CriticalSectionScoped double_lock(
@@ -153,17 +132,12 @@ void ModuleRtpRtcpImpl::RegisterChildModule(RtpRtcp* module) {
}
void ModuleRtpRtcpImpl::DeRegisterChildModule(RtpRtcp* remove_module) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "DeRegisterChildModule(module:0x%x)", remove_module);
-
CriticalSectionScoped lock(
critical_section_module_ptrs_.get());
CriticalSectionScoped double_lock(
critical_section_module_ptrs_feedback_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module == remove_module) {
@@ -191,13 +165,7 @@ int32_t ModuleRtpRtcpImpl::Process() {
last_bitrate_process_time_ = now;
}
- bool default_instance = false;
- {
- CriticalSectionScoped cs(critical_section_module_ptrs_.get());
- if (!child_modules_.empty())
- default_instance = true;
- }
- if (!default_instance) {
+ if (!IsDefaultModule()) {
bool process_rtt = now >= last_rtt_process_time_ + kRtpRtcpRttProcessTimeMs;
if (rtcp_sender_.Sending()) {
// Process RTT if we have received a receiver report and we haven't
@@ -269,16 +237,18 @@ int32_t ModuleRtpRtcpImpl::Process() {
return 0;
}
-int32_t ModuleRtpRtcpImpl::SetRTXSendStatus(int mode, bool set_ssrc,
- uint32_t ssrc) {
- rtp_sender_.SetRTXStatus(mode, set_ssrc, ssrc);
- return 0;
+void ModuleRtpRtcpImpl::SetRTXSendStatus(int mode) {
+ rtp_sender_.SetRTXStatus(mode);
}
-int32_t ModuleRtpRtcpImpl::RTXSendStatus(int* mode, uint32_t* ssrc,
- int* payload_type) const {
+void ModuleRtpRtcpImpl::RTXSendStatus(int* mode,
+ uint32_t* ssrc,
+ int* payload_type) const {
rtp_sender_.RTXStatus(mode, ssrc, payload_type);
- return 0;
+}
+
+void ModuleRtpRtcpImpl::SetRtxSsrc(uint32_t ssrc) {
+ rtp_sender_.SetRtxSsrc(ssrc);
}
void ModuleRtpRtcpImpl::SetRtxSendPayloadType(int payload_type) {
@@ -288,29 +258,12 @@ void ModuleRtpRtcpImpl::SetRtxSendPayloadType(int payload_type) {
int32_t ModuleRtpRtcpImpl::IncomingRtcpPacket(
const uint8_t* rtcp_packet,
const uint16_t length) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "IncomingRtcpPacket(packet_length:%u)", length);
- // Minimum RTP is 12 bytes.
- // Minimum RTCP is 8 bytes (RTCP BYE).
- if (length == 8) {
- WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, -1,
- "IncomingRtcpPacket invalid length");
- return false;
- }
- // Check RTP version.
- const uint8_t version = rtcp_packet[0] >> 6;
- if (version != 2) {
- WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, -1,
- "IncomingRtcpPacket invalid RTP version");
- return false;
- }
// Allow receive of non-compound RTCP packets.
RTCPUtility::RTCPParserV2 rtcp_parser(rtcp_packet, length, true);
const bool valid_rtcpheader = rtcp_parser.IsValid();
if (!valid_rtcpheader) {
- WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, id_,
- "IncomingRtcpPacket invalid RTCP packet");
+ LOG(LS_WARNING) << "Incoming invalid RTCP packet";
return -1;
}
RTCPHelp::RTCPPacketInformation rtcp_packet_information;
@@ -324,14 +277,6 @@ int32_t ModuleRtpRtcpImpl::IncomingRtcpPacket(
int32_t ModuleRtpRtcpImpl::RegisterSendPayload(
const CodecInst& voice_codec) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "RegisterSendPayload(pl_name:%s pl_type:%d frequency:%u)",
- voice_codec.plname,
- voice_codec.pltype,
- voice_codec.plfreq);
-
return rtp_sender_.RegisterPayload(
voice_codec.plname,
voice_codec.pltype,
@@ -342,13 +287,6 @@ int32_t ModuleRtpRtcpImpl::RegisterSendPayload(
int32_t ModuleRtpRtcpImpl::RegisterSendPayload(
const VideoCodec& video_codec) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "RegisterSendPayload(pl_name:%s pl_type:%d)",
- video_codec.plName,
- video_codec.plType);
-
send_video_codec_ = video_codec;
{
// simulcast_ is accessed when accessing child_modules_, so this write needs
@@ -365,11 +303,6 @@ int32_t ModuleRtpRtcpImpl::RegisterSendPayload(
int32_t ModuleRtpRtcpImpl::DeRegisterSendPayload(
const int8_t payload_type) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "DeRegisterSendPayload(%d)", payload_type);
-
return rtp_sender_.DeRegisterSendPayload(payload_type);
}
@@ -378,58 +311,37 @@ int8_t ModuleRtpRtcpImpl::SendPayloadType() const {
}
uint32_t ModuleRtpRtcpImpl::StartTimestamp() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "StartTimestamp()");
-
return rtp_sender_.StartTimestamp();
}
// Configure start timestamp, default is a random number.
int32_t ModuleRtpRtcpImpl::SetStartTimestamp(
const uint32_t timestamp) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetStartTimestamp(%d)",
- timestamp);
rtcp_sender_.SetStartTimestamp(timestamp);
rtp_sender_.SetStartTimestamp(timestamp, true);
return 0; // TODO(pwestin): change to void.
}
uint16_t ModuleRtpRtcpImpl::SequenceNumber() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SequenceNumber()");
-
return rtp_sender_.SequenceNumber();
}
// Set SequenceNumber, default is a random number.
int32_t ModuleRtpRtcpImpl::SetSequenceNumber(
const uint16_t seq_num) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetSequenceNumber(%d)",
- seq_num);
-
rtp_sender_.SetSequenceNumber(seq_num);
return 0; // TODO(pwestin): change to void.
}
uint32_t ModuleRtpRtcpImpl::SSRC() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SSRC()");
-
return rtp_sender_.SSRC();
}
// Configure SSRC, default is a random number.
-int32_t ModuleRtpRtcpImpl::SetSSRC(const uint32_t ssrc) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetSSRC(%d)", ssrc);
-
+void ModuleRtpRtcpImpl::SetSSRC(const uint32_t ssrc) {
rtp_sender_.SetSSRC(ssrc);
rtcp_sender_.SetSSRC(ssrc);
SetRtcpReceiverSsrcs(ssrc);
-
- return 0; // TODO(pwestin): change to void.
}
int32_t ModuleRtpRtcpImpl::SetCSRCStatus(const bool include) {
@@ -440,27 +352,17 @@ int32_t ModuleRtpRtcpImpl::SetCSRCStatus(const bool include) {
int32_t ModuleRtpRtcpImpl::CSRCs(
uint32_t arr_of_csrc[kRtpCsrcSize]) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "CSRCs()");
-
return rtp_sender_.CSRCs(arr_of_csrc);
}
int32_t ModuleRtpRtcpImpl::SetCSRCs(
const uint32_t arr_of_csrc[kRtpCsrcSize],
const uint8_t arr_length) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetCSRCs(arr_length:%d)",
- arr_length);
-
- const bool default_instance(child_modules_.empty() ? false : true);
-
- if (default_instance) {
+ if (IsDefaultModule()) {
// For default we need to update all child modules too.
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -469,10 +371,6 @@ int32_t ModuleRtpRtcpImpl::SetCSRCs(
it++;
}
} else {
- for (int i = 0; i < arr_length; ++i) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "\tidx:%d CSRC:%u", i,
- arr_of_csrc[i]);
- }
rtcp_sender_.SetCSRCs(arr_of_csrc, arr_length);
rtp_sender_.SetCSRCs(arr_of_csrc, arr_length);
}
@@ -480,38 +378,23 @@ int32_t ModuleRtpRtcpImpl::SetCSRCs(
}
uint32_t ModuleRtpRtcpImpl::PacketCountSent() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "PacketCountSent()");
-
return rtp_sender_.Packets();
}
uint32_t ModuleRtpRtcpImpl::ByteCountSent() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "ByteCountSent()");
-
return rtp_sender_.Bytes();
}
int ModuleRtpRtcpImpl::CurrentSendFrequencyHz() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "CurrentSendFrequencyHz()");
-
return rtp_sender_.SendPayloadFrequency();
}
int32_t ModuleRtpRtcpImpl::SetSendingStatus(const bool sending) {
- if (sending) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetSendingStatus(sending)");
- } else {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetSendingStatus(stopped)");
- }
if (rtcp_sender_.Sending() != sending) {
// Sends RTCP BYE when going from true to false
RTCPSender::FeedbackState feedback_state(this);
if (rtcp_sender_.SetSendingStatus(feedback_state, sending) != 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Failed to send RTCP BYE");
+ LOG(LS_WARNING) << "Failed to send RTCP BYE";
}
collision_detected_ = false;
@@ -536,33 +419,21 @@ int32_t ModuleRtpRtcpImpl::SetSendingStatus(const bool sending) {
}
bool ModuleRtpRtcpImpl::Sending() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "Sending()");
-
return rtcp_sender_.Sending();
}
int32_t ModuleRtpRtcpImpl::SetSendingMediaStatus(const bool sending) {
- if (sending) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetSendingMediaStatus(sending)");
- } else {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetSendingMediaStatus(stopped)");
- }
rtp_sender_.SetSendingMediaStatus(sending);
return 0;
}
bool ModuleRtpRtcpImpl::SendingMedia() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "Sending()");
-
- const bool have_child_modules(child_modules_.empty() ? false : true);
- if (!have_child_modules) {
+ if (!IsDefaultModule()) {
return rtp_sender_.SendingMedia();
}
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::const_iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::const_iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RTPSender& rtp_sender = (*it)->rtp_sender_;
if (rtp_sender.SendingMedia()) {
@@ -582,17 +453,9 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
uint32_t payload_size,
const RTPFragmentationHeader* fragmentation,
const RTPVideoHeader* rtp_video_hdr) {
- WEBRTC_TRACE(
- kTraceStream,
- kTraceRtpRtcp,
- id_,
- "SendOutgoingData(frame_type:%d payload_type:%d time_stamp:%u size:%u)",
- frame_type, payload_type, time_stamp, payload_size);
-
rtcp_sender_.SetLastRtpTime(time_stamp, capture_time_ms);
- const bool have_child_modules(child_modules_.empty() ? false : true);
- if (!have_child_modules) {
+ if (!IsDefaultModule()) {
// Don't send RTCP from default module.
if (rtcp_sender_.TimeToSendRTCPReport(kVideoFrameKey == frame_type)) {
RTCPSender::FeedbackState feedback_state(this);
@@ -615,7 +478,7 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
return -1;
}
int idx = 0;
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
for (; idx < rtp_video_hdr->simulcastIdx; ++it) {
if (it == child_modules_.end()) {
return -1;
@@ -633,11 +496,6 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
if (it == child_modules_.end()) {
return -1;
}
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SendOutgoingData(SimulcastIdx:%u size:%u, ssrc:0x%x)",
- idx, payload_size, (*it)->rtp_sender_.SSRC());
return (*it)->SendOutgoingData(frame_type,
payload_type,
time_stamp,
@@ -647,7 +505,7 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
fragmentation,
rtp_video_hdr);
} else {
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
// Send to all "child" modules
while (it != child_modules_.end()) {
if ((*it)->SendingMedia()) {
@@ -670,19 +528,7 @@ bool ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms,
bool retransmission) {
- WEBRTC_TRACE(
- kTraceStream,
- kTraceRtpRtcp,
- id_,
- "TimeToSendPacket(ssrc:0x%x sequence_number:%u capture_time_ms:%ll)",
- ssrc, sequence_number, capture_time_ms);
-
- bool no_child_modules = false;
- {
- CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- no_child_modules = child_modules_.empty();
- }
- if (no_child_modules) {
+ if (!IsDefaultModule()) {
// Don't send from default module.
if (SendingMedia() && ssrc == rtp_sender_.SSRC()) {
return rtp_sender_.TimeToSendPacket(sequence_number, capture_time_ms,
@@ -690,7 +536,7 @@ bool ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,
}
} else {
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
if ((*it)->SendingMedia() && ssrc == (*it)->rtp_sender_.SSRC()) {
return (*it)->rtp_sender_.TimeToSendPacket(sequence_number,
@@ -705,28 +551,18 @@ bool ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,
}
int ModuleRtpRtcpImpl::TimeToSendPadding(int bytes) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_, "TimeToSendPadding(bytes: %d)",
- bytes);
-
- bool no_child_modules = false;
- {
- CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- no_child_modules = child_modules_.empty();
- }
- if (no_child_modules) {
+ if (!IsDefaultModule()) {
// Don't send from default module.
if (SendingMedia()) {
return rtp_sender_.TimeToSendPadding(bytes);
}
} else {
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
- while (it != child_modules_.end()) {
+ for (size_t i = 0; i < child_modules_.size(); ++i) {
// Send padding on one of the modules sending media.
- if ((*it)->SendingMedia()) {
- return (*it)->rtp_sender_.TimeToSendPadding(bytes);
+ if (child_modules_[i]->SendingMedia()) {
+ return child_modules_[i]->rtp_sender_.TimeToSendPadding(bytes);
}
- ++it;
}
}
return 0;
@@ -737,7 +573,7 @@ bool ModuleRtpRtcpImpl::GetSendSideDelay(int* avg_send_delay_ms,
assert(avg_send_delay_ms);
assert(max_send_delay_ms);
- if (!child_modules_.empty()) {
+ if (IsDefaultModule()) {
// This API is only supported for child modules.
return false;
}
@@ -745,26 +581,17 @@ bool ModuleRtpRtcpImpl::GetSendSideDelay(int* avg_send_delay_ms,
}
uint16_t ModuleRtpRtcpImpl::MaxPayloadLength() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "MaxPayloadLength()");
-
return rtp_sender_.MaxPayloadLength();
}
uint16_t ModuleRtpRtcpImpl::MaxDataPayloadLength() const {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "MaxDataPayloadLength()");
-
// Assuming IP/UDP.
uint16_t min_data_payload_length = IP_PACKET_SIZE - 28;
- const bool default_instance(child_modules_.empty() ? false : true);
- if (default_instance) {
+ if (IsDefaultModule()) {
// For default we need to update all child modules too.
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::const_iterator it =
- child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::const_iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -789,13 +616,6 @@ int32_t ModuleRtpRtcpImpl::SetTransportOverhead(
const bool tcp,
const bool ipv6,
const uint8_t authentication_overhead) {
- WEBRTC_TRACE(
- kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetTransportOverhead(TCP:%d, IPV6:%d authentication_overhead:%u)",
- tcp, ipv6, authentication_overhead);
-
uint16_t packet_overhead = 0;
if (ipv6) {
packet_overhead = 40;
@@ -827,12 +647,8 @@ int32_t ModuleRtpRtcpImpl::SetTransportOverhead(
}
int32_t ModuleRtpRtcpImpl::SetMaxTransferUnit(const uint16_t mtu) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetMaxTransferUnit(%u)",
- mtu);
-
if (mtu > IP_PACKET_SIZE) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Invalid in argument to SetMaxTransferUnit(%u)", mtu);
+ LOG(LS_ERROR) << "Invalid mtu: " << mtu;
return -1;
}
return rtp_sender_.SetMaxPayloadLength(mtu - packet_overhead_,
@@ -840,8 +656,6 @@ int32_t ModuleRtpRtcpImpl::SetMaxTransferUnit(const uint16_t mtu) {
}
RTCPMethod ModuleRtpRtcpImpl::RTCP() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RTCP()");
-
if (rtcp_sender_.Status() != kRtcpOff) {
return rtcp_receiver_.Status();
}
@@ -850,9 +664,6 @@ RTCPMethod ModuleRtpRtcpImpl::RTCP() const {
// Configure RTCP status i.e on/off.
int32_t ModuleRtpRtcpImpl::SetRTCPStatus(const RTCPMethod method) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetRTCPStatus(%d)",
- method);
-
if (rtcp_sender_.SetRTCPStatus(method) == 0) {
return rtcp_receiver_.SetRTCPStatus(method);
}
@@ -866,36 +677,26 @@ uint32_t ModuleRtpRtcpImpl::LastSendReport(
}
int32_t ModuleRtpRtcpImpl::SetCNAME(const char c_name[RTCP_CNAME_SIZE]) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetCNAME(%s)", c_name);
return rtcp_sender_.SetCNAME(c_name);
}
int32_t ModuleRtpRtcpImpl::CNAME(char c_name[RTCP_CNAME_SIZE]) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "CNAME()");
return rtcp_sender_.CNAME(c_name);
}
int32_t ModuleRtpRtcpImpl::AddMixedCNAME(
const uint32_t ssrc,
const char c_name[RTCP_CNAME_SIZE]) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "AddMixedCNAME(SSRC:%u)", ssrc);
-
return rtcp_sender_.AddMixedCNAME(ssrc, c_name);
}
int32_t ModuleRtpRtcpImpl::RemoveMixedCNAME(const uint32_t ssrc) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "RemoveMixedCNAME(SSRC:%u)", ssrc);
return rtcp_sender_.RemoveMixedCNAME(ssrc);
}
int32_t ModuleRtpRtcpImpl::RemoteCNAME(
const uint32_t remote_ssrc,
char c_name[RTCP_CNAME_SIZE]) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "RemoteCNAME(SSRC:%u)", remote_ssrc);
-
return rtcp_receiver_.CNAME(remote_ssrc, c_name);
}
@@ -905,8 +706,6 @@ int32_t ModuleRtpRtcpImpl::RemoteNTP(
uint32_t* rtcp_arrival_time_secs,
uint32_t* rtcp_arrival_time_frac,
uint32_t* rtcp_timestamp) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteNTP()");
-
return rtcp_receiver_.NTP(received_ntpsecs,
received_ntpfrac,
rtcp_arrival_time_secs,
@@ -920,24 +719,21 @@ int32_t ModuleRtpRtcpImpl::RTT(const uint32_t remote_ssrc,
uint16_t* avg_rtt,
uint16_t* min_rtt,
uint16_t* max_rtt) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RTT()");
-
- return rtcp_receiver_.RTT(remote_ssrc, rtt, avg_rtt, min_rtt, max_rtt);
+ int32_t ret = rtcp_receiver_.RTT(remote_ssrc, rtt, avg_rtt, min_rtt, max_rtt);
+ if (rtt && *rtt == 0) {
+ // Try to get RTT from RtcpRttStats class.
+ *rtt = static_cast<uint16_t>(rtt_ms());
+ }
+ return ret;
}
// Reset RoundTripTime statistics.
int32_t ModuleRtpRtcpImpl::ResetRTT(const uint32_t remote_ssrc) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "ResetRTT(SSRC:%u)",
- remote_ssrc);
-
return rtcp_receiver_.ResetRTT(remote_ssrc);
}
// Reset RTP data counters for the sending side.
int32_t ModuleRtpRtcpImpl::ResetSendDataCountersRTP() {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "ResetSendDataCountersRTP()");
-
rtp_sender_.ResetDataCounters();
return 0; // TODO(pwestin): change to void.
}
@@ -945,8 +741,6 @@ int32_t ModuleRtpRtcpImpl::ResetSendDataCountersRTP() {
// Force a send of an RTCP packet.
// Normal SR and RR are triggered via the process function.
int32_t ModuleRtpRtcpImpl::SendRTCP(uint32_t rtcp_packet_type) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SendRTCP(0x%x)",
- rtcp_packet_type);
RTCPSender::FeedbackState feedback_state(this);
return rtcp_sender_.SendRTCP(feedback_state, rtcp_packet_type);
}
@@ -956,24 +750,16 @@ int32_t ModuleRtpRtcpImpl::SetRTCPApplicationSpecificData(
const uint32_t name,
const uint8_t* data,
const uint16_t length) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetRTCPApplicationSpecificData(sub_type:%d name:0x%x)",
- sub_type, name);
-
return rtcp_sender_.SetApplicationSpecificData(sub_type, name, data, length);
}
// (XR) VOIP metric.
int32_t ModuleRtpRtcpImpl::SetRTCPVoIPMetrics(
const RTCPVoIPMetric* voip_metric) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetRTCPVoIPMetrics()");
-
return rtcp_sender_.SetRTCPVoIPMetrics(voip_metric);
}
void ModuleRtpRtcpImpl::SetRtcpXrRrtrStatus(bool enable) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetRtcpXrRrtrStatus(%s)", enable ? "true" : "false");
return rtcp_sender_.SendRtcpXrReceiverReferenceTime(enable);
}
@@ -984,8 +770,6 @@ bool ModuleRtpRtcpImpl::RtcpXrRrtrStatus() const {
int32_t ModuleRtpRtcpImpl::DataCountersRTP(
uint32_t* bytes_sent,
uint32_t* packets_sent) const {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_, "DataCountersRTP()");
-
if (bytes_sent) {
*bytes_sent = rtp_sender_.Bytes();
}
@@ -996,77 +780,54 @@ int32_t ModuleRtpRtcpImpl::DataCountersRTP(
}
int32_t ModuleRtpRtcpImpl::RemoteRTCPStat(RTCPSenderInfo* sender_info) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteRTCPStat()");
-
return rtcp_receiver_.SenderInfoReceived(sender_info);
}
// Received RTCP report.
int32_t ModuleRtpRtcpImpl::RemoteRTCPStat(
std::vector<RTCPReportBlock>* receive_blocks) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoteRTCPStat()");
-
return rtcp_receiver_.StatisticsReceived(receive_blocks);
}
int32_t ModuleRtpRtcpImpl::AddRTCPReportBlock(
const uint32_t ssrc,
const RTCPReportBlock* report_block) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "AddRTCPReportBlock()");
-
return rtcp_sender_.AddExternalReportBlock(ssrc, report_block);
}
int32_t ModuleRtpRtcpImpl::RemoveRTCPReportBlock(
const uint32_t ssrc) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "RemoveRTCPReportBlock()");
-
return rtcp_sender_.RemoveExternalReportBlock(ssrc);
}
+void ModuleRtpRtcpImpl::GetRtcpPacketTypeCounters(
+ RtcpPacketTypeCounter* packets_sent,
+ RtcpPacketTypeCounter* packets_received) const {
+ rtcp_sender_.GetPacketTypeCounter(packets_sent);
+ rtcp_receiver_.GetPacketTypeCounter(packets_received);
+}
+
// (REMB) Receiver Estimated Max Bitrate.
bool ModuleRtpRtcpImpl::REMB() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "REMB()");
-
return rtcp_sender_.REMB();
}
int32_t ModuleRtpRtcpImpl::SetREMBStatus(const bool enable) {
- if (enable) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetREMBStatus(enable)");
- } else {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetREMBStatus(disable)");
- }
return rtcp_sender_.SetREMBStatus(enable);
}
int32_t ModuleRtpRtcpImpl::SetREMBData(const uint32_t bitrate,
const uint8_t number_of_ssrc,
const uint32_t* ssrc) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetREMBData(bitrate:%d,?,?)", bitrate);
return rtcp_sender_.SetREMBData(bitrate, number_of_ssrc, ssrc);
}
// (IJ) Extended jitter report.
bool ModuleRtpRtcpImpl::IJ() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "IJ()");
-
return rtcp_sender_.IJ();
}
int32_t ModuleRtpRtcpImpl::SetIJStatus(const bool enable) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetIJStatus(%s)", enable ? "true" : "false");
-
return rtcp_sender_.SetIJStatus(enable);
}
@@ -1083,25 +844,14 @@ int32_t ModuleRtpRtcpImpl::DeregisterSendRtpHeaderExtension(
// (TMMBR) Temporary Max Media Bit Rate.
bool ModuleRtpRtcpImpl::TMMBR() const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "TMMBR()");
-
return rtcp_sender_.TMMBR();
}
int32_t ModuleRtpRtcpImpl::SetTMMBRStatus(const bool enable) {
- if (enable) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetTMMBRStatus(enable)");
- } else {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetTMMBRStatus(disable)");
- }
return rtcp_sender_.SetTMMBRStatus(enable);
}
int32_t ModuleRtpRtcpImpl::SetTMMBN(const TMMBRSet* bounding_set) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SetTMMBN()");
-
uint32_t max_bitrate_kbit =
rtp_sender_.MaxConfiguredBitrateVideo() / 1000;
return rtcp_sender_.SetTMMBN(bounding_set, max_bitrate_kbit);
@@ -1109,32 +859,18 @@ int32_t ModuleRtpRtcpImpl::SetTMMBN(const TMMBRSet* bounding_set) {
// Returns the currently configured retransmission mode.
int ModuleRtpRtcpImpl::SelectiveRetransmissions() const {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SelectiveRetransmissions()");
return rtp_sender_.SelectiveRetransmissions();
}
// Enable or disable a retransmission mode, which decides which packets will
// be retransmitted if NACKed.
int ModuleRtpRtcpImpl::SetSelectiveRetransmissions(uint8_t settings) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetSelectiveRetransmissions(%u)",
- settings);
return rtp_sender_.SetSelectiveRetransmissions(settings);
}
// Send a Negative acknowledgment packet.
int32_t ModuleRtpRtcpImpl::SendNACK(const uint16_t* nack_list,
const uint16_t size) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SendNACK(size:%u)", size);
-
// Use RTT from RtcpRttStats class if provided.
uint16_t rtt = rtt_ms();
if (rtt == 0) {
@@ -1188,14 +924,6 @@ int32_t ModuleRtpRtcpImpl::SendNACK(const uint16_t* nack_list,
int32_t ModuleRtpRtcpImpl::SetStorePacketsStatus(
const bool enable,
const uint16_t number_to_store) {
- if (enable) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetStorePacketsStatus(enable, number_to_store:%d)",
- number_to_store);
- } else {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetStorePacketsStatus(disable)");
- }
rtp_sender_.SetStorePacketsStatus(enable, number_to_store);
return 0; // TODO(pwestin): change to void.
}
@@ -1219,21 +947,11 @@ int32_t ModuleRtpRtcpImpl::SendTelephoneEventOutband(
const uint8_t key,
const uint16_t time_ms,
const uint8_t level) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SendTelephoneEventOutband(key:%u, time_ms:%u, level:%u)", key,
- time_ms, level);
-
return rtp_sender_.SendTelephoneEvent(key, time_ms, level);
}
bool ModuleRtpRtcpImpl::SendTelephoneEventActive(
int8_t& telephone_event) const {
-
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SendTelephoneEventActive()");
-
return rtp_sender_.SendTelephoneEventActive(&telephone_event);
}
@@ -1241,68 +959,23 @@ bool ModuleRtpRtcpImpl::SendTelephoneEventActive(
// packet in silence (CNG).
int32_t ModuleRtpRtcpImpl::SetAudioPacketSize(
const uint16_t packet_size_samples) {
-
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetAudioPacketSize(%u)",
- packet_size_samples);
-
return rtp_sender_.SetAudioPacketSize(packet_size_samples);
}
-int32_t ModuleRtpRtcpImpl::SetRTPAudioLevelIndicationStatus(
- const bool enable,
- const uint8_t id) {
-
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetRTPAudioLevelIndicationStatus(enable=%d, ID=%u)",
- enable,
- id);
-
- return rtp_sender_.SetAudioLevelIndicationStatus(enable, id);
-}
-
-int32_t ModuleRtpRtcpImpl::GetRTPAudioLevelIndicationStatus(
- bool& enable,
- uint8_t& id) const {
-
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "GetRTPAudioLevelIndicationStatus()");
- return rtp_sender_.AudioLevelIndicationStatus(&enable, &id);
-}
-
int32_t ModuleRtpRtcpImpl::SetAudioLevel(
const uint8_t level_d_bov) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetAudioLevel(level_d_bov:%u)",
- level_d_bov);
return rtp_sender_.SetAudioLevel(level_d_bov);
}
// Set payload type for Redundant Audio Data RFC 2198.
int32_t ModuleRtpRtcpImpl::SetSendREDPayloadType(
const int8_t payload_type) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetSendREDPayloadType(%d)",
- payload_type);
-
return rtp_sender_.SetRED(payload_type);
}
// Get payload type for Redundant Audio Data RFC 2198.
int32_t ModuleRtpRtcpImpl::SendREDPayloadType(
int8_t& payload_type) const {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "SendREDPayloadType()");
-
return rtp_sender_.RED(&payload_type);
}
@@ -1312,54 +985,41 @@ RtpVideoCodecTypes ModuleRtpRtcpImpl::SendVideoCodec() const {
void ModuleRtpRtcpImpl::SetTargetSendBitrate(
const std::vector<uint32_t>& stream_bitrates) {
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_,
- "SetTargetSendBitrate: %ld streams", stream_bitrates.size());
-
- const bool have_child_modules(child_modules_.empty() ? false : true);
- if (have_child_modules) {
+ if (IsDefaultModule()) {
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
if (simulcast_) {
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
for (size_t i = 0;
it != child_modules_.end() && i < stream_bitrates.size(); ++it) {
if ((*it)->SendingMedia()) {
RTPSender& rtp_sender = (*it)->rtp_sender_;
- rtp_sender.SetTargetSendBitrate(stream_bitrates[i]);
+ rtp_sender.SetTargetBitrate(stream_bitrates[i]);
++i;
}
}
} else {
- assert(stream_bitrates.size() == 1);
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ if (stream_bitrates.size() > 1)
+ return;
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
for (; it != child_modules_.end(); ++it) {
RTPSender& rtp_sender = (*it)->rtp_sender_;
- rtp_sender.SetTargetSendBitrate(stream_bitrates[0]);
+ rtp_sender.SetTargetBitrate(stream_bitrates[0]);
}
}
} else {
- assert(stream_bitrates.size() == 1);
- rtp_sender_.SetTargetSendBitrate(stream_bitrates[0]);
+ if (stream_bitrates.size() > 1)
+ return;
+ rtp_sender_.SetTargetBitrate(stream_bitrates[0]);
}
}
int32_t ModuleRtpRtcpImpl::SetKeyFrameRequestMethod(
const KeyFrameRequestMethod method) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetKeyFrameRequestMethod(method:%u)",
- method);
-
key_frame_req_method_ = method;
return 0;
}
int32_t ModuleRtpRtcpImpl::RequestKeyFrame() {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "RequestKeyFrame");
-
switch (key_frame_req_method_) {
case kKeyFrameReqFirRtp:
return rtp_sender_.SendRTPIntraRequest();
@@ -1373,29 +1033,15 @@ int32_t ModuleRtpRtcpImpl::RequestKeyFrame() {
int32_t ModuleRtpRtcpImpl::SendRTCPSliceLossIndication(
const uint8_t picture_id) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SendRTCPSliceLossIndication (picture_id:%d)",
- picture_id);
-
RTCPSender::FeedbackState feedback_state(this);
return rtcp_sender_.SendRTCP(
feedback_state, kRtcpSli, 0, 0, false, picture_id);
}
int32_t ModuleRtpRtcpImpl::SetCameraDelay(const int32_t delay_ms) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetCameraDelay(%d)",
- delay_ms);
- const bool default_instance(child_modules_.empty() ? false : true);
-
- if (default_instance) {
+ if (IsDefaultModule()) {
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
-
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -1412,18 +1058,6 @@ int32_t ModuleRtpRtcpImpl::SetGenericFECStatus(
const bool enable,
const uint8_t payload_type_red,
const uint8_t payload_type_fec) {
- if (enable) {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetGenericFECStatus(enable, %u)",
- payload_type_red);
- } else {
- WEBRTC_TRACE(kTraceModuleCall,
- kTraceRtpRtcp,
- id_,
- "SetGenericFECStatus(disable)");
- }
return rtp_sender_.SetGenericFECStatus(enable,
payload_type_red,
payload_type_fec);
@@ -1433,15 +1067,11 @@ int32_t ModuleRtpRtcpImpl::GenericFECStatus(
bool& enable,
uint8_t& payload_type_red,
uint8_t& payload_type_fec) {
-
- WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, id_, "GenericFECStatus()");
-
bool child_enabled = false;
- const bool default_instance(child_modules_.empty() ? false : true);
- if (default_instance) {
+ if (IsDefaultModule()) {
// For default we need to check all child modules too.
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -1471,12 +1101,11 @@ int32_t ModuleRtpRtcpImpl::GenericFECStatus(
int32_t ModuleRtpRtcpImpl::SetFecParameters(
const FecProtectionParams* delta_params,
const FecProtectionParams* key_params) {
- const bool default_instance(child_modules_.empty() ? false : true);
- if (default_instance) {
+ if (IsDefaultModule()) {
// For default we need to update all child modules too.
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
- std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -1517,9 +1146,7 @@ void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate,
uint32_t* video_rate,
uint32_t* fec_rate,
uint32_t* nack_rate) const {
- const bool default_instance(child_modules_.empty() ? false : true);
-
- if (default_instance) {
+ if (IsDefaultModule()) {
// For default we need to update the send bitrate.
CriticalSectionScoped lock(critical_section_module_ptrs_feedback_.get());
@@ -1532,8 +1159,7 @@ void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate,
if (nack_rate != NULL)
*nack_rate = 0;
- std::list<ModuleRtpRtcpImpl*>::const_iterator it =
- child_modules_.begin();
+ std::vector<ModuleRtpRtcpImpl*>::const_iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
@@ -1570,21 +1196,7 @@ void ModuleRtpRtcpImpl::BitrateSent(uint32_t* total_rate,
void ModuleRtpRtcpImpl::RegisterVideoBitrateObserver(
BitrateStatisticsObserver* observer) {
- {
- CriticalSectionScoped cs(critical_section_module_ptrs_.get());
- if (!child_modules_.empty()) {
- for (std::list<ModuleRtpRtcpImpl*>::const_iterator it =
- child_modules_.begin();
- it != child_modules_.end();
- ++it) {
- RtpRtcp* module = *it;
- if (module)
- module->RegisterVideoBitrateObserver(observer);
- }
- return;
- }
- }
-
+ assert(!IsDefaultModule());
rtp_sender_.RegisterBitrateObserver(observer);
}
@@ -1592,7 +1204,6 @@ BitrateStatisticsObserver* ModuleRtpRtcpImpl::GetVideoBitrateObserver() const {
return rtp_sender_.GetBitrateObserver();
}
-// Bad state of RTP receiver request a keyframe.
void ModuleRtpRtcpImpl::OnRequestIntraFrame() {
RequestKeyFrame();
}
@@ -1716,4 +1327,9 @@ FrameCountObserver* ModuleRtpRtcpImpl::GetSendFrameCountObserver() const {
return rtp_sender_.GetFrameCountObserver();
}
+bool ModuleRtpRtcpImpl::IsDefaultModule() const {
+ CriticalSectionScoped cs(critical_section_module_ptrs_.get());
+ return !child_modules_.empty();
+}
+
} // Namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
index 075770dd235..55826b6fe8b 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -21,10 +21,6 @@
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/test/testsupport/gtest_prod_util.h"
-#ifdef MATLAB
-class MatlabPlot;
-#endif
-
namespace webrtc {
class ModuleRtpRtcpImpl : public RtpRtcp {
@@ -80,7 +76,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
virtual uint32_t SSRC() const OVERRIDE;
// Configure SSRC, default is a random number.
- virtual int32_t SetSSRC(const uint32_t ssrc) OVERRIDE;
+ virtual void SetSSRC(const uint32_t ssrc) OVERRIDE;
virtual int32_t CSRCs(uint32_t arr_of_csrc[kRtpCsrcSize]) const OVERRIDE;
@@ -95,13 +91,12 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
virtual uint32_t ByteCountSent() const;
- virtual int32_t SetRTXSendStatus(const int mode,
- const bool set_ssrc,
- const uint32_t ssrc) OVERRIDE;
+ virtual void SetRTXSendStatus(const int mode) OVERRIDE;
- virtual int32_t RTXSendStatus(int* mode, uint32_t* ssrc,
- int* payloadType) const OVERRIDE;
+ virtual void RTXSendStatus(int* mode, uint32_t* ssrc,
+ int* payloadType) const OVERRIDE;
+ virtual void SetRtxSsrc(uint32_t ssrc) OVERRIDE;
virtual void SetRtxSendPayloadType(int payload_type) OVERRIDE;
@@ -197,10 +192,14 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Set received RTCP report block.
virtual int32_t AddRTCPReportBlock(
- const uint32_t ssrc, const RTCPReportBlock* receive_block) OVERRIDE;
+ const uint32_t ssrc, const RTCPReportBlock* receive_block) OVERRIDE;
virtual int32_t RemoveRTCPReportBlock(const uint32_t ssrc) OVERRIDE;
+ virtual void GetRtcpPacketTypeCounters(
+ RtcpPacketTypeCounter* packets_sent,
+ RtcpPacketTypeCounter* packets_received) const OVERRIDE;
+
// (REMB) Receiver Estimated Max Bitrate.
virtual bool REMB() const OVERRIDE;
@@ -291,14 +290,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Get payload type for Redundant Audio Data RFC 2198.
virtual int32_t SendREDPayloadType(int8_t& payload_type) const OVERRIDE;
- // Set status and id for header-extension-for-audio-level-indication.
- virtual int32_t SetRTPAudioLevelIndicationStatus(
- const bool enable, const uint8_t id) OVERRIDE;
-
- // Get status and id for header-extension-for-audio-level-indication.
- virtual int32_t GetRTPAudioLevelIndicationStatus(
- bool& enable, uint8_t& id) const OVERRIDE;
-
// Store the audio level in d_bov for header-extension-for-audio-level-
// indication.
virtual int32_t SetAudioLevel(const uint8_t level_d_bov) OVERRIDE;
@@ -418,6 +409,8 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
void set_rtt_ms(uint32_t rtt_ms);
uint32_t rtt_ms() const;
+ bool IsDefaultModule() const;
+
int32_t id_;
const bool audio_;
bool collision_detected_;
@@ -429,7 +422,8 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
scoped_ptr<CriticalSectionWrapper> critical_section_module_ptrs_;
scoped_ptr<CriticalSectionWrapper> critical_section_module_ptrs_feedback_;
ModuleRtpRtcpImpl* default_module_;
- std::list<ModuleRtpRtcpImpl*> child_modules_;
+ std::vector<ModuleRtpRtcpImpl*> child_modules_;
+ size_t padding_index_;
// Send side
NACKMethod nack_method_;
@@ -442,10 +436,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
RemoteBitrateEstimator* remote_bitrate_;
-#ifdef MATLAB
- MatlabPlot* plot1_;
-#endif
-
RtcpRttStats* rtt_stats_;
// The processed RTT from RtcpRttStats.
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
index 50f7f2e1cb3..eba4e010b59 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl_unittest.cc
@@ -12,11 +12,23 @@
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/common_types.h"
+#include "webrtc/modules/pacing/include/mock/mock_paced_sender.h"
+#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_impl.h"
+#include "webrtc/system_wrappers/interface/scoped_vector.h"
+
+using ::testing::_;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::SaveArg;
namespace webrtc {
namespace {
+const uint32_t kSenderSsrc = 0x12345;
+const uint32_t kReceiverSsrc = 0x23456;
+const uint32_t kSenderRtxSsrc = 0x32345;
+const uint32_t kOneWayNetworkDelayMs = 100;
class RtcpRttStatsTestImpl : public RtcpRttStats {
public:
@@ -35,12 +47,12 @@ class RtcpRttStatsTestImpl : public RtcpRttStats {
class SendTransport : public Transport,
public NullRtpData {
public:
- SendTransport() : rtp_rtcp_impl_(NULL), clock_(NULL), delay_ms_(0) {}
+ SendTransport() : receiver_(NULL), clock_(NULL), delay_ms_(0) {}
- void SetRtpRtcpModule(ModuleRtpRtcpImpl* rtp_rtcp_impl) {
- rtp_rtcp_impl_ = rtp_rtcp_impl;
+ void SetRtpRtcpModule(ModuleRtpRtcpImpl* receiver) {
+ receiver_ = receiver;
}
- void SimulateNetworkDelay(int delay_ms, SimulatedClock* clock) {
+ void SimulateNetworkDelay(uint32_t delay_ms, SimulatedClock* clock) {
clock_ = clock;
delay_ms_ = delay_ms;
}
@@ -51,14 +63,48 @@ class SendTransport : public Transport,
if (clock_) {
clock_->AdvanceTimeMilliseconds(delay_ms_);
}
- EXPECT_TRUE(rtp_rtcp_impl_ != NULL);
- EXPECT_EQ(0, rtp_rtcp_impl_->IncomingRtcpPacket(
+ EXPECT_TRUE(receiver_ != NULL);
+ EXPECT_EQ(0, receiver_->IncomingRtcpPacket(
static_cast<const uint8_t*>(data), len));
return len;
}
- ModuleRtpRtcpImpl* rtp_rtcp_impl_;
+ ModuleRtpRtcpImpl* receiver_;
SimulatedClock* clock_;
- int delay_ms_;
+ uint32_t delay_ms_;
+};
+
+class RtpRtcpModule {
+ public:
+ RtpRtcpModule(SimulatedClock* clock)
+ : receive_statistics_(ReceiveStatistics::Create(clock)) {
+ RtpRtcp::Configuration config;
+ config.audio = false;
+ config.clock = clock;
+ config.outgoing_transport = &transport_;
+ config.receive_statistics = receive_statistics_.get();
+ config.rtt_stats = &rtt_stats_;
+
+ impl_.reset(new ModuleRtpRtcpImpl(config));
+ EXPECT_EQ(0, impl_->SetRTCPStatus(kRtcpCompound));
+
+ transport_.SimulateNetworkDelay(kOneWayNetworkDelayMs, clock);
+ }
+
+ RtcpPacketTypeCounter packets_sent_;
+ RtcpPacketTypeCounter packets_received_;
+ scoped_ptr<ReceiveStatistics> receive_statistics_;
+ SendTransport transport_;
+ RtcpRttStatsTestImpl rtt_stats_;
+ scoped_ptr<ModuleRtpRtcpImpl> impl_;
+
+ RtcpPacketTypeCounter RtcpSent() {
+ impl_->GetRtcpPacketTypeCounters(&packets_sent_, &packets_received_);
+ return packets_sent_;
+ }
+ RtcpPacketTypeCounter RtcpReceived() {
+ impl_->GetRtcpPacketTypeCounters(&packets_sent_, &packets_received_);
+ return packets_received_;
+ }
};
} // namespace
@@ -66,97 +112,357 @@ class RtpRtcpImplTest : public ::testing::Test {
protected:
RtpRtcpImplTest()
: clock_(1335900000),
- receive_statistics_(ReceiveStatistics::Create(&clock_)) {
- RtpRtcp::Configuration configuration;
- configuration.id = 0;
- configuration.audio = false;
- configuration.clock = &clock_;
- configuration.outgoing_transport = &transport_;
- configuration.receive_statistics = receive_statistics_.get();
- configuration.rtt_stats = &rtt_stats_;
-
- rtp_rtcp_impl_.reset(new ModuleRtpRtcpImpl(configuration));
- transport_.SetRtpRtcpModule(rtp_rtcp_impl_.get());
+ sender_(&clock_),
+ receiver_(&clock_) {
+ // Send module.
+ EXPECT_EQ(0, sender_.impl_->SetSendingStatus(true));
+ sender_.impl_->SetSSRC(kSenderSsrc);
+ sender_.impl_->SetRemoteSSRC(kReceiverSsrc);
+ // Receive module.
+ EXPECT_EQ(0, receiver_.impl_->SetSendingStatus(false));
+ receiver_.impl_->SetSSRC(kReceiverSsrc);
+ receiver_.impl_->SetRemoteSSRC(kSenderSsrc);
+ // Transport settings.
+ sender_.transport_.SetRtpRtcpModule(receiver_.impl_.get());
+ receiver_.transport_.SetRtpRtcpModule(sender_.impl_.get());
}
-
SimulatedClock clock_;
- scoped_ptr<ReceiveStatistics> receive_statistics_;
- scoped_ptr<ModuleRtpRtcpImpl> rtp_rtcp_impl_;
- SendTransport transport_;
- RtcpRttStatsTestImpl rtt_stats_;
+ RtpRtcpModule sender_;
+ RtpRtcpModule receiver_;
};
TEST_F(RtpRtcpImplTest, Rtt) {
- const uint32_t kSsrc = 0x12345;
- RTPHeader header = {};
+ RTPHeader header;
header.timestamp = 1;
header.sequenceNumber = 123;
- header.ssrc = kSsrc;
+ header.ssrc = kSenderSsrc;
header.headerLength = 12;
- receive_statistics_->IncomingPacket(header, 100, false);
-
- rtp_rtcp_impl_->SetRemoteSSRC(kSsrc);
- EXPECT_EQ(0, rtp_rtcp_impl_->SetSendingStatus(true));
- EXPECT_EQ(0, rtp_rtcp_impl_->SetRTCPStatus(kRtcpCompound));
- EXPECT_EQ(0, rtp_rtcp_impl_->SetSSRC(kSsrc));
+ receiver_.receive_statistics_->IncomingPacket(header, 100, false);
- // A SR should have been sent and received.
- EXPECT_EQ(0, rtp_rtcp_impl_->SendRTCP(kRtcpReport));
+ // Sender module should send a SR.
+ EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport));
- // Send new SR. A response to the last SR should be sent.
+ // Receiver module should send a RR with a response to the last received SR.
clock_.AdvanceTimeMilliseconds(1000);
- transport_.SimulateNetworkDelay(100, &clock_);
- EXPECT_EQ(0, rtp_rtcp_impl_->SendRTCP(kRtcpReport));
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport));
// Verify RTT.
uint16_t rtt;
uint16_t avg_rtt;
uint16_t min_rtt;
uint16_t max_rtt;
- EXPECT_EQ(0, rtp_rtcp_impl_->RTT(kSsrc, &rtt, &avg_rtt, &min_rtt, &max_rtt));
- EXPECT_EQ(100, rtt);
- EXPECT_EQ(100, avg_rtt);
- EXPECT_EQ(100, min_rtt);
- EXPECT_EQ(100, max_rtt);
+ EXPECT_EQ(0,
+ sender_.impl_->RTT(kReceiverSsrc, &rtt, &avg_rtt, &min_rtt, &max_rtt));
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, rtt);
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, avg_rtt);
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, min_rtt);
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, max_rtt);
// No RTT from other ssrc.
EXPECT_EQ(-1,
- rtp_rtcp_impl_->RTT(kSsrc + 1, &rtt, &avg_rtt, &min_rtt, &max_rtt));
+ sender_.impl_->RTT(kReceiverSsrc+1, &rtt, &avg_rtt, &min_rtt, &max_rtt));
// Verify RTT from rtt_stats config.
- EXPECT_EQ(0U, rtt_stats_.LastProcessedRtt());
- EXPECT_EQ(0U, rtp_rtcp_impl_->rtt_ms());
- rtp_rtcp_impl_->Process();
- EXPECT_EQ(100U, rtt_stats_.LastProcessedRtt());
- EXPECT_EQ(100U, rtp_rtcp_impl_->rtt_ms());
+ EXPECT_EQ(0U, sender_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(0U, sender_.impl_->rtt_ms());
+ sender_.impl_->Process();
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, sender_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, sender_.impl_->rtt_ms());
}
TEST_F(RtpRtcpImplTest, SetRtcpXrRrtrStatus) {
- EXPECT_FALSE(rtp_rtcp_impl_->RtcpXrRrtrStatus());
- rtp_rtcp_impl_->SetRtcpXrRrtrStatus(true);
- EXPECT_TRUE(rtp_rtcp_impl_->RtcpXrRrtrStatus());
+ EXPECT_FALSE(receiver_.impl_->RtcpXrRrtrStatus());
+ receiver_.impl_->SetRtcpXrRrtrStatus(true);
+ EXPECT_TRUE(receiver_.impl_->RtcpXrRrtrStatus());
}
TEST_F(RtpRtcpImplTest, RttForReceiverOnly) {
- rtp_rtcp_impl_->SetRtcpXrRrtrStatus(true);
- EXPECT_EQ(0, rtp_rtcp_impl_->SetSendingStatus(false));
- EXPECT_EQ(0, rtp_rtcp_impl_->SetRTCPStatus(kRtcpCompound));
- EXPECT_EQ(0, rtp_rtcp_impl_->SetSSRC(0x12345));
+ receiver_.impl_->SetRtcpXrRrtrStatus(true);
- // A Receiver time reference report (RTRR) should be sent and received.
- EXPECT_EQ(0, rtp_rtcp_impl_->SendRTCP(kRtcpReport));
+ // Receiver module should send a Receiver time reference report (RTRR).
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpReport));
- // Send new RTRR. A response to the last RTRR should be sent.
+ // Sender module should send a response to the last received RTRR (DLRR).
clock_.AdvanceTimeMilliseconds(1000);
- transport_.SimulateNetworkDelay(100, &clock_);
- EXPECT_EQ(0, rtp_rtcp_impl_->SendRTCP(kRtcpReport));
+ EXPECT_EQ(0, sender_.impl_->SendRTCP(kRtcpReport));
// Verify RTT.
- EXPECT_EQ(0U, rtt_stats_.LastProcessedRtt());
- EXPECT_EQ(0U, rtp_rtcp_impl_->rtt_ms());
- rtp_rtcp_impl_->Process();
- EXPECT_EQ(100U, rtt_stats_.LastProcessedRtt());
- EXPECT_EQ(100U, rtp_rtcp_impl_->rtt_ms());
+ EXPECT_EQ(0U, receiver_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(0U, receiver_.impl_->rtt_ms());
+ receiver_.impl_->Process();
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, receiver_.rtt_stats_.LastProcessedRtt());
+ EXPECT_EQ(2 * kOneWayNetworkDelayMs, receiver_.impl_->rtt_ms());
+}
+
+TEST_F(RtpRtcpImplTest, RtcpPacketTypeCounter_Nack) {
+ EXPECT_EQ(0U, sender_.RtcpReceived().nack_packets);
+ EXPECT_EQ(0U, receiver_.RtcpSent().nack_packets);
+ // Receive module sends a NACK.
+ const uint16_t kNackLength = 1;
+ uint16_t nack_list[kNackLength] = {123};
+ EXPECT_EQ(0, receiver_.impl_->SendNACK(nack_list, kNackLength));
+ EXPECT_EQ(1U, receiver_.RtcpSent().nack_packets);
+
+ // Send module receives the NACK.
+ EXPECT_EQ(1U, sender_.RtcpReceived().nack_packets);
+}
+
+TEST_F(RtpRtcpImplTest, RtcpPacketTypeCounter_FirAndPli) {
+ EXPECT_EQ(0U, sender_.RtcpReceived().fir_packets);
+ EXPECT_EQ(0U, receiver_.RtcpSent().fir_packets);
+ // Receive module sends a FIR.
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpFir));
+ EXPECT_EQ(1U, receiver_.RtcpSent().fir_packets);
+ // Send module receives the FIR.
+ EXPECT_EQ(1U, sender_.RtcpReceived().fir_packets);
+
+ // Receive module sends a FIR and PLI.
+ EXPECT_EQ(0, receiver_.impl_->SendRTCP(kRtcpFir | kRtcpPli));
+ EXPECT_EQ(2U, receiver_.RtcpSent().fir_packets);
+ EXPECT_EQ(1U, receiver_.RtcpSent().pli_packets);
+ // Send module receives the FIR and PLI.
+ EXPECT_EQ(2U, sender_.RtcpReceived().fir_packets);
+ EXPECT_EQ(1U, sender_.RtcpReceived().pli_packets);
+}
+
+class RtpSendingTestTransport : public Transport {
+ public:
+ void ResetCounters() { bytes_received_.clear(); }
+
+ virtual int SendPacket(int channel, const void* data, int length) {
+ RTPHeader header;
+ scoped_ptr<RtpHeaderParser> parser(RtpHeaderParser::Create());
+ EXPECT_TRUE(
+ parser->Parse(static_cast<const uint8_t*>(data), length, &header));
+ bytes_received_[header.ssrc] += length;
+ ++packets_received_[header.ssrc];
+ return length;
+ }
+
+ virtual int SendRTCPPacket(int channel, const void* data, int length) {
+ return length;
+ }
+
+ int GetPacketsReceived(uint32_t ssrc) const {
+ std::map<uint32_t, int>::const_iterator it = packets_received_.find(ssrc);
+ if (it == packets_received_.end())
+ return 0;
+ return it->second;
+ }
+
+ int GetBytesReceived(uint32_t ssrc) const {
+ std::map<uint32_t, int>::const_iterator it = bytes_received_.find(ssrc);
+ if (it == bytes_received_.end())
+ return 0;
+ return it->second;
+ }
+
+ int GetTotalBytesReceived() const {
+ int sum = 0;
+ for (std::map<uint32_t, int>::const_iterator it = bytes_received_.begin();
+ it != bytes_received_.end();
+ ++it) {
+ sum += it->second;
+ }
+ return sum;
+ }
+
+ private:
+ std::map<uint32_t, int> bytes_received_;
+ std::map<uint32_t, int> packets_received_;
+};
+
+class RtpSendingTest : public ::testing::Test {
+ protected:
+ // Map from SSRC to number of received packets and bytes.
+ typedef std::map<uint32_t, std::pair<int, int> > PaddingMap;
+
+ RtpSendingTest() {
+ // Send module.
+ RtpRtcp::Configuration config;
+ config.audio = false;
+ config.clock = Clock::GetRealTimeClock();
+ config.outgoing_transport = &transport_;
+ config.receive_statistics = receive_statistics_.get();
+ config.rtt_stats = &rtt_stats_;
+ config.paced_sender = &pacer_;
+ memset(&codec_, 0, sizeof(VideoCodec));
+ codec_.plType = 100;
+ strncpy(codec_.plName, "VP8", 3);
+ codec_.numberOfSimulcastStreams = 3;
+ codec_.simulcastStream[0].width = 320;
+ codec_.simulcastStream[0].height = 180;
+ codec_.simulcastStream[0].maxBitrate = 300;
+ codec_.simulcastStream[1].width = 640;
+ codec_.simulcastStream[1].height = 360;
+ codec_.simulcastStream[1].maxBitrate = 600;
+ codec_.simulcastStream[2].width = 1280;
+ codec_.simulcastStream[2].height = 720;
+ codec_.simulcastStream[2].maxBitrate = 1200;
+ // We need numberOfSimulcastStreams + 1 RTP modules since we need one
+ // default module.
+ for (int i = 0; i < codec_.numberOfSimulcastStreams + 1; ++i) {
+ RtpRtcp* sender = RtpRtcp::CreateRtpRtcp(config);
+ EXPECT_EQ(0, sender->RegisterSendPayload(codec_));
+ EXPECT_EQ(0, sender->SetSendingStatus(true));
+ EXPECT_EQ(0, sender->SetSendingMediaStatus(true));
+ sender->SetSSRC(kSenderSsrc + i);
+ sender->SetRemoteSSRC(kReceiverSsrc + i);
+ senders_.push_back(sender);
+ config.default_module = senders_[0];
+ }
+ std::vector<uint32_t> bitrates;
+ bitrates.push_back(codec_.simulcastStream[0].maxBitrate);
+ bitrates.push_back(codec_.simulcastStream[1].maxBitrate);
+ bitrates.push_back(codec_.simulcastStream[2].maxBitrate);
+ senders_[0]->SetTargetSendBitrate(bitrates);
+ }
+
+ ~RtpSendingTest() {
+ for (int i = senders_.size() - 1; i >= 0; --i) {
+ delete senders_[i];
+ }
+ }
+
+ void SendFrameOnSender(int sender_index,
+ const uint8_t* payload,
+ size_t length) {
+ RTPVideoHeader rtp_video_header = {
+ codec_.simulcastStream[sender_index].width,
+ codec_.simulcastStream[sender_index].height,
+ true,
+ 0,
+ kRtpVideoVp8,
+ {}};
+ uint32_t seq_num = 0;
+ uint32_t ssrc = 0;
+ int64_t capture_time_ms = 0;
+ bool retransmission = false;
+ EXPECT_CALL(pacer_, SendPacket(_, _, _, _, _, _))
+ .WillRepeatedly(DoAll(SaveArg<1>(&ssrc),
+ SaveArg<2>(&seq_num),
+ SaveArg<3>(&capture_time_ms),
+ SaveArg<5>(&retransmission),
+ Return(true)));
+ EXPECT_EQ(0,
+ senders_[sender_index]->SendOutgoingData(kVideoFrameKey,
+ codec_.plType,
+ 0,
+ 0,
+ payload,
+ length,
+ NULL,
+ &rtp_video_header));
+ EXPECT_TRUE(senders_[sender_index]->TimeToSendPacket(
+ ssrc, seq_num, capture_time_ms, retransmission));
+ }
+
+ void ExpectPadding(const PaddingMap& expected_padding) {
+ int expected_total_bytes = 0;
+ for (PaddingMap::const_iterator it = expected_padding.begin();
+ it != expected_padding.end();
+ ++it) {
+ int packets_received = transport_.GetBytesReceived(it->first);
+ if (it->second.first > 0) {
+ EXPECT_GE(packets_received, it->second.first)
+ << "On SSRC: " << it->first;
+ }
+ int bytes_received = transport_.GetBytesReceived(it->first);
+ expected_total_bytes += bytes_received;
+ if (it->second.second > 0) {
+ EXPECT_GE(bytes_received, it->second.second)
+ << "On SSRC: " << it->first;
+ } else {
+ EXPECT_EQ(0, bytes_received) << "On SSRC: " << it->first;
+ }
+ }
+ EXPECT_EQ(expected_total_bytes, transport_.GetTotalBytesReceived());
+ }
+
+ scoped_ptr<ReceiveStatistics> receive_statistics_;
+ RtcpRttStatsTestImpl rtt_stats_;
+ std::vector<RtpRtcp*> senders_;
+ RtpSendingTestTransport transport_;
+ NiceMock<MockPacedSender> pacer_;
+ VideoCodec codec_;
+};
+
+TEST_F(RtpSendingTest, DISABLED_RoundRobinPadding) {
+ // We have to send on an SSRC to be allowed to pad, since a marker bit must
+ // be sent prior to padding packets.
+ const uint8_t payload[200] = {0};
+ for (int i = 0; i < codec_.numberOfSimulcastStreams; ++i) {
+ SendFrameOnSender(i + 1, payload, sizeof(payload));
+ }
+ transport_.ResetCounters();
+ senders_[0]->TimeToSendPadding(500);
+ PaddingMap expected_padding;
+ expected_padding[kSenderSsrc + 1] = std::make_pair(2, 500);
+ expected_padding[kSenderSsrc + 2] = std::make_pair(0, 0);
+ expected_padding[kSenderSsrc + 3] = std::make_pair(0, 0);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1000);
+ expected_padding[kSenderSsrc + 2] = std::make_pair(4, 1000);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1500);
+ expected_padding[kSenderSsrc + 3] = std::make_pair(6, 1500);
+ ExpectPadding(expected_padding);
+}
+
+TEST_F(RtpSendingTest, DISABLED_RoundRobinPaddingRtx) {
+ // Enable RTX to allow padding to be sent prior to media.
+ for (int i = 1; i < codec_.numberOfSimulcastStreams + 1; ++i) {
+ senders_[i]->SetRtxSendPayloadType(96);
+ senders_[i]->SetRtxSsrc(kSenderRtxSsrc + i);
+ senders_[i]->SetRTXSendStatus(kRtxRetransmitted);
+ }
+ transport_.ResetCounters();
+ senders_[0]->TimeToSendPadding(500);
+ PaddingMap expected_padding;
+ expected_padding[kSenderSsrc + 1] = std::make_pair(0, 0);
+ expected_padding[kSenderSsrc + 2] = std::make_pair(0, 0);
+ expected_padding[kSenderSsrc + 3] = std::make_pair(0, 0);
+ expected_padding[kSenderRtxSsrc + 1] = std::make_pair(2, 500);
+ expected_padding[kSenderRtxSsrc + 2] = std::make_pair(0, 0);
+ expected_padding[kSenderRtxSsrc + 3] = std::make_pair(0, 0);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1000);
+ expected_padding[kSenderRtxSsrc + 2] = std::make_pair(4, 500);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1500);
+
+ expected_padding[kSenderRtxSsrc + 3] = std::make_pair(6, 500);
+ ExpectPadding(expected_padding);
}
+TEST_F(RtpSendingTest, DISABLED_RoundRobinPaddingRtxRedundantPayloads) {
+ for (int i = 1; i < codec_.numberOfSimulcastStreams + 1; ++i) {
+ senders_[i]->SetRtxSendPayloadType(96);
+ senders_[i]->SetRtxSsrc(kSenderRtxSsrc + i);
+ senders_[i]->SetRTXSendStatus(kRtxRetransmitted | kRtxRedundantPayloads);
+ senders_[i]->SetStorePacketsStatus(true, 100);
+ }
+ // First send payloads so that we have something to retransmit.
+ const size_t kPayloadSize = 500;
+ const uint8_t payload[kPayloadSize] = {0};
+ for (int i = 0; i < codec_.numberOfSimulcastStreams; ++i) {
+ SendFrameOnSender(i + 1, payload, sizeof(payload));
+ }
+ transport_.ResetCounters();
+ senders_[0]->TimeToSendPadding(500);
+ PaddingMap expected_padding;
+ expected_padding[kSenderSsrc + 1] = std::make_pair<int, int>(0, 0);
+ expected_padding[kSenderSsrc + 2] = std::make_pair<int, int>(0, 0);
+ expected_padding[kSenderSsrc + 3] = std::make_pair<int, int>(0, 0);
+ expected_padding[kSenderRtxSsrc + 1] = std::make_pair<int, int>(1, 500);
+ expected_padding[kSenderRtxSsrc + 2] = std::make_pair<int, int>(0, 0);
+ expected_padding[kSenderRtxSsrc + 3] = std::make_pair<int, int>(0, 0);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1000);
+ expected_padding[kSenderRtxSsrc + 2] = std::make_pair<int, int>(2, 1000);
+ ExpectPadding(expected_padding);
+ senders_[0]->TimeToSendPadding(1500);
+ expected_padding[kSenderRtxSsrc + 3] = std::make_pair<int, int>(3, 1500);
+ ExpectPadding(expected_padding);
+}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
index fd320324b8a..7cfcd7222c8 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.cc
@@ -15,7 +15,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -56,7 +56,6 @@ RTPSender::RTPSender(const int32_t id,
transport_(transport),
sending_media_(true), // Default to sending media.
max_payload_length_(IP_PACKET_SIZE - 28), // Default is IP-v4/UDP.
- target_send_bitrate_(0),
packet_over_head_(28),
payload_type_(-1),
payload_type_map_(),
@@ -88,7 +87,9 @@ RTPSender::RTPSender(const int32_t id,
csrcs_(),
include_csrcs_(true),
rtx_(kRtxOff),
- payload_type_rtx_(-1) {
+ payload_type_rtx_(-1),
+ target_bitrate_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+ target_bitrate_(0) {
memset(nack_byte_count_times_, 0, sizeof(nack_byte_count_times_));
memset(nack_byte_count_, 0, sizeof(nack_byte_count_));
memset(csrcs_, 0, sizeof(csrcs_));
@@ -104,9 +105,8 @@ RTPSender::RTPSender(const int32_t id,
audio_ = new RTPSenderAudio(id, clock_, this);
audio_->RegisterAudioCallback(audio_feedback);
} else {
- video_ = new RTPSenderVideo(id, clock_, this);
+ video_ = new RTPSenderVideo(clock_, this);
}
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
}
RTPSender::~RTPSender() {
@@ -125,12 +125,16 @@ RTPSender::~RTPSender() {
}
delete audio_;
delete video_;
+}
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id_, "%s deleted", __FUNCTION__);
+void RTPSender::SetTargetBitrate(uint32_t bitrate) {
+ CriticalSectionScoped cs(target_bitrate_critsect_.get());
+ target_bitrate_ = bitrate;
}
-void RTPSender::SetTargetSendBitrate(const uint32_t bits) {
- target_send_bitrate_ = static_cast<uint16_t>(bits / 1000);
+uint32_t RTPSender::GetTargetBitrate() {
+ CriticalSectionScoped cs(target_bitrate_critsect_.get());
+ return target_bitrate_;
}
uint16_t RTPSender::ActualSendBitrateKbit() const {
@@ -157,10 +161,12 @@ uint32_t RTPSender::NackOverheadRate() const {
bool RTPSender::GetSendSideDelay(int* avg_send_delay_ms,
int* max_send_delay_ms) const {
+ if (!SendingMedia())
+ return false;
CriticalSectionScoped cs(statistics_crit_.get());
SendDelayMap::const_iterator it = send_delays_.upper_bound(
clock_->TimeInMilliseconds() - kSendSideDelayWindowMs);
- if (!sending_media_ || it == send_delays_.end())
+ if (it == send_delays_.end())
return false;
int num_delays = 0;
for (; it != send_delays_.end(); ++it) {
@@ -273,7 +279,10 @@ int32_t RTPSender::DeRegisterSendPayload(
return 0;
}
-int8_t RTPSender::SendPayloadType() const { return payload_type_; }
+int8_t RTPSender::SendPayloadType() const {
+ CriticalSectionScoped cs(send_critsect_);
+ return payload_type_;
+}
int RTPSender::SendPayloadFrequency() const {
return audio_ != NULL ? audio_->AudioFrequency() : kVideoPayloadTypeFrequency;
@@ -284,16 +293,12 @@ int32_t RTPSender::SetMaxPayloadLength(
const uint16_t packet_over_head) {
// Sanity check.
if (max_payload_length < 100 || max_payload_length > IP_PACKET_SIZE) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_, "%s invalid argument",
- __FUNCTION__);
+ LOG(LS_ERROR) << "Invalid max payload length: " << max_payload_length;
return -1;
}
CriticalSectionScoped cs(send_critsect_);
max_payload_length_ = max_payload_length;
packet_over_head_ = packet_over_head;
-
- WEBRTC_TRACE(kTraceInfo, kTraceRtpRtcp, id_, "SetMaxPayloadLength to %d.",
- max_payload_length);
return 0;
}
@@ -301,9 +306,9 @@ uint16_t RTPSender::MaxDataPayloadLength() const {
if (audio_configured_) {
return max_payload_length_ - RTPHeaderLength();
} else {
- return max_payload_length_ - RTPHeaderLength() -
- video_->FECPacketOverhead() - ((rtx_) ? 2 : 0);
- // Include the FEC/ULP/RED overhead.
+ return max_payload_length_ - RTPHeaderLength() // RTP overhead.
+ - video_->FECPacketOverhead() // FEC/ULP/RED overhead.
+ - ((rtx_) ? 2 : 0); // RTX overhead.
}
}
@@ -313,16 +318,14 @@ uint16_t RTPSender::MaxPayloadLength() const {
uint16_t RTPSender::PacketOverHead() const { return packet_over_head_; }
-void RTPSender::SetRTXStatus(int mode, bool set_ssrc, uint32_t ssrc) {
+void RTPSender::SetRTXStatus(int mode) {
CriticalSectionScoped cs(send_critsect_);
rtx_ = mode;
- if (rtx_ != kRtxOff) {
- if (set_ssrc) {
- ssrc_rtx_ = ssrc;
- } else {
- ssrc_rtx_ = ssrc_db_.CreateSSRC(); // Can't be 0.
- }
- }
+}
+
+void RTPSender::SetRtxSsrc(uint32_t ssrc) {
+ CriticalSectionScoped cs(send_critsect_);
+ ssrc_rtx_ = ssrc;
}
void RTPSender::RTXStatus(int* mode, uint32_t* ssrc,
@@ -333,7 +336,6 @@ void RTPSender::RTXStatus(int* mode, uint32_t* ssrc,
*payload_type = payload_type_rtx_;
}
-
void RTPSender::SetRtxPayloadType(int payload_type) {
CriticalSectionScoped cs(send_critsect_);
payload_type_rtx_ = payload_type;
@@ -344,8 +346,7 @@ int32_t RTPSender::CheckPayloadType(const int8_t payload_type,
CriticalSectionScoped cs(send_critsect_);
if (payload_type < 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_, "\tinvalid payload_type (%d)",
- payload_type);
+ LOG(LS_ERROR) << "Invalid payload_type " << payload_type;
return -1;
}
if (audio_configured_) {
@@ -367,8 +368,7 @@ int32_t RTPSender::CheckPayloadType(const int8_t payload_type,
std::map<int8_t, ModuleRTPUtility::Payload *>::iterator it =
payload_type_map_.find(payload_type);
if (it == payload_type_map_.end()) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "\tpayloadType:%d not registered", payload_type);
+ LOG(LS_WARNING) << "Payload type " << payload_type << " not registered.";
return -1;
}
payload_type_ = payload_type;
@@ -397,9 +397,7 @@ int32_t RTPSender::SendOutgoingData(
}
RtpVideoCodecTypes video_type = kRtpVideoGeneric;
if (CheckPayloadType(payload_type, &video_type) != 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "%s invalid argument failed to find payload_type:%d",
- __FUNCTION__, payload_type);
+ LOG(LS_ERROR) << "Don't send data with unknown payload type.";
return -1;
}
@@ -456,7 +454,7 @@ int RTPSender::SendRedundantPayloads(int payload_type, int bytes_to_send) {
&capture_time_ms)) {
break;
}
- if (!PrepareAndSendPacket(buffer, length, capture_time_ms, true))
+ if (!PrepareAndSendPacket(buffer, length, capture_time_ms, true, false))
return -1;
ModuleRTPUtility::RTPHeaderParser rtp_parser(buffer, length);
RTPHeader rtp_header;
@@ -472,7 +470,8 @@ bool RTPSender::SendPaddingAccordingToBitrate(
// Current bitrate since last estimate(1 second) averaged with the
// estimate since then, to get the most up to date bitrate.
uint32_t current_bitrate = bitrate_sent_.BitrateNow();
- int bitrate_diff = target_send_bitrate_ * 1000 - current_bitrate;
+ uint32_t target_bitrate = GetTargetBitrate();
+ int bitrate_diff = target_bitrate - current_bitrate;
if (bitrate_diff <= 0) {
return true;
}
@@ -483,7 +482,7 @@ bool RTPSender::SendPaddingAccordingToBitrate(
} else {
bytes = (bitrate_diff / 8);
// Cap at 200 ms of target send data.
- int bytes_cap = target_send_bitrate_ * 25; // 1000 / 8 / 5.
+ int bytes_cap = target_bitrate / 1000 * 25; // 1000 / 8 / 5.
if (bytes > bytes_cap) {
bytes = bytes_cap;
}
@@ -528,7 +527,7 @@ int RTPSender::SendPadData(int payload_type, uint32_t timestamp,
StorageType store, bool force_full_size_packets,
bool only_pad_after_markerbit) {
// Drop this packet if we're not sending media packets.
- if (!sending_media_) {
+ if (!SendingMedia()) {
return bytes;
}
int padding_bytes_in_packet = 0;
@@ -596,7 +595,6 @@ bool RTPSender::StorePackets() const {
int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) {
uint16_t length = IP_PACKET_SIZE;
uint8_t data_buffer[IP_PACKET_SIZE];
- uint8_t *buffer_to_send_ptr = data_buffer;
int64_t capture_time_ms;
if (!packet_history_.GetPacketAndSetSendTime(packet_id, min_resend_time, true,
data_buffer, &length,
@@ -605,19 +603,13 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) {
return 0;
}
- ModuleRTPUtility::RTPHeaderParser rtp_parser(data_buffer, length);
- RTPHeader header;
- if (!rtp_parser.Parse(header)) {
- assert(false);
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, id_,
- "Failed to parse RTP header of packet to be retransmitted.");
- return -1;
- }
- TRACE_EVENT_INSTANT2("webrtc_rtp", "RTPSender::ReSendPacket",
- "timestamp", header.timestamp,
- "seqnum", header.sequenceNumber);
-
if (paced_sender_) {
+ ModuleRTPUtility::RTPHeaderParser rtp_parser(data_buffer, length);
+ RTPHeader header;
+ if (!rtp_parser.Parse(header)) {
+ assert(false);
+ return -1;
+ }
if (!paced_sender_->SendPacket(PacedSender::kHighPriority,
header.ssrc,
header.sequenceNumber,
@@ -630,17 +622,9 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) {
}
}
- uint8_t data_buffer_rtx[IP_PACKET_SIZE];
- if ((rtx_ & kRtxRetransmitted) > 0) {
- BuildRtxPacket(data_buffer, &length, data_buffer_rtx);
- buffer_to_send_ptr = data_buffer_rtx;
- }
-
- if (SendPacketToNetwork(buffer_to_send_ptr, length)) {
- UpdateRtpStats(buffer_to_send_ptr, length, header, rtx_ != kRtxOff, true);
- return length;
- }
- return -1;
+ return PrepareAndSendPacket(data_buffer, length, capture_time_ms,
+ (rtx_ & kRtxRetransmitted) > 0, true) ?
+ length : -1;
}
bool RTPSender::SendPacketToNetwork(const uint8_t *packet, uint32_t size) {
@@ -650,10 +634,9 @@ bool RTPSender::SendPacketToNetwork(const uint8_t *packet, uint32_t size) {
}
TRACE_EVENT_INSTANT2("webrtc_rtp", "RTPSender::SendPacketToNetwork",
"size", size, "sent", bytes_sent);
- // TODO(pwesin): Add a separate bitrate for sent bitrate after pacer.
+ // TODO(pwestin): Add a separate bitrate for sent bitrate after pacer.
if (bytes_sent <= 0) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Transport failed to send packet");
+ LOG(LS_WARNING) << "Transport failed to send packet";
return false;
}
return true;
@@ -678,12 +661,12 @@ void RTPSender::OnReceivedNACK(
"num_seqnum", nack_sequence_numbers.size(), "avg_rtt", avg_rtt);
const int64_t now = clock_->TimeInMilliseconds();
uint32_t bytes_re_sent = 0;
+ uint32_t target_bitrate = GetTargetBitrate();
// Enough bandwidth to send NACK?
if (!ProcessNACKBitRate(now)) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "NACK bitrate reached. Skip sending NACK response. Target %d",
- target_send_bitrate_);
+ LOG(LS_INFO) << "NACK bitrate reached. Skip sending NACK response. Target "
+ << target_bitrate;
return;
}
@@ -698,16 +681,15 @@ void RTPSender::OnReceivedNACK(
continue;
} else if (bytes_sent < 0) {
// Failed to send one Sequence number. Give up the rest in this nack.
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, id_,
- "Failed resending RTP packet %d, Discard rest of packets",
- *it);
+ LOG(LS_WARNING) << "Failed resending RTP packet " << *it
+ << ", Discard rest of packets";
break;
}
// Delay bandwidth estimate (RTT * BW).
- if (target_send_bitrate_ != 0 && avg_rtt) {
+ if (target_bitrate != 0 && avg_rtt) {
// kbits/s * ms = bits => bits/8 = bytes
uint32_t target_bytes =
- (static_cast<uint32_t>(target_send_bitrate_) * avg_rtt) >> 3;
+ (static_cast<uint32_t>(target_bitrate / 1000) * avg_rtt) >> 3;
if (bytes_re_sent > target_bytes) {
break; // Ignore the rest of the packets in the list.
}
@@ -722,32 +704,34 @@ void RTPSender::OnReceivedNACK(
bool RTPSender::ProcessNACKBitRate(const uint32_t now) {
uint32_t num = 0;
- int32_t byte_count = 0;
- const uint32_t avg_interval = 1000;
+ int byte_count = 0;
+ const int kAvgIntervalMs = 1000;
+ uint32_t target_bitrate = GetTargetBitrate();
CriticalSectionScoped cs(send_critsect_);
- if (target_send_bitrate_ == 0) {
+ if (target_bitrate == 0) {
return true;
}
for (num = 0; num < NACK_BYTECOUNT_SIZE; ++num) {
- if ((now - nack_byte_count_times_[num]) > avg_interval) {
+ if ((now - nack_byte_count_times_[num]) > kAvgIntervalMs) {
// Don't use data older than 1sec.
break;
} else {
byte_count += nack_byte_count_[num];
}
}
- int32_t time_interval = avg_interval;
+ int time_interval = kAvgIntervalMs;
if (num == NACK_BYTECOUNT_SIZE) {
// More than NACK_BYTECOUNT_SIZE nack messages has been received
// during the last msg_interval.
time_interval = now - nack_byte_count_times_[num - 1];
if (time_interval < 0) {
- time_interval = avg_interval;
+ time_interval = kAvgIntervalMs;
}
}
- return (byte_count * 8) < (target_send_bitrate_ * time_interval);
+ return (byte_count * 8) <
+ static_cast<int>(target_bitrate / 1000 * time_interval);
}
void RTPSender::UpdateNACKBitRate(const uint32_t bytes,
@@ -796,19 +780,21 @@ bool RTPSender::TimeToSendPacket(uint16_t sequence_number,
UpdateDelayStatistics(capture_time_ms, clock_->TimeInMilliseconds());
}
return PrepareAndSendPacket(data_buffer, length, capture_time_ms,
- retransmission && (rtx_ & kRtxRetransmitted) > 0);
+ retransmission && (rtx_ & kRtxRetransmitted) > 0,
+ retransmission);
}
bool RTPSender::PrepareAndSendPacket(uint8_t* buffer,
uint16_t length,
int64_t capture_time_ms,
- bool send_over_rtx) {
+ bool send_over_rtx,
+ bool is_retransmit) {
uint8_t *buffer_to_send_ptr = buffer;
ModuleRTPUtility::RTPHeaderParser rtp_parser(buffer, length);
RTPHeader rtp_header;
rtp_parser.Parse(rtp_header);
- TRACE_EVENT_INSTANT2("webrtc_rtp", "RTPSender::TimeToSendPacket",
+ TRACE_EVENT_INSTANT2("webrtc_rtp", "PrepareAndSendPacket",
"timestamp", rtp_header.timestamp,
"seqnum", rtp_header.sequenceNumber);
@@ -820,20 +806,12 @@ bool RTPSender::PrepareAndSendPacket(uint8_t* buffer,
int64_t now_ms = clock_->TimeInMilliseconds();
int64_t diff_ms = now_ms - capture_time_ms;
- bool updated_transmission_time_offset =
- UpdateTransmissionTimeOffset(buffer_to_send_ptr, length, rtp_header,
- diff_ms);
- bool updated_abs_send_time =
- UpdateAbsoluteSendTime(buffer_to_send_ptr, length, rtp_header, now_ms);
- if (updated_transmission_time_offset || updated_abs_send_time) {
- // Update stored packet in case of receiving a re-transmission request.
- packet_history_.ReplaceRTPHeader(buffer_to_send_ptr,
- rtp_header.sequenceNumber,
- rtp_header.headerLength);
- }
-
+ UpdateTransmissionTimeOffset(buffer_to_send_ptr, length, rtp_header,
+ diff_ms);
+ UpdateAbsoluteSendTime(buffer_to_send_ptr, length, rtp_header, now_ms);
bool ret = SendPacketToNetwork(buffer_to_send_ptr, length);
- UpdateRtpStats(buffer_to_send_ptr, length, rtp_header, false, false);
+ UpdateRtpStats(buffer_to_send_ptr, length, rtp_header, send_over_rtx,
+ is_retransmit);
return ret;
}
@@ -842,15 +820,16 @@ void RTPSender::UpdateRtpStats(const uint8_t* buffer,
const RTPHeader& header,
bool is_rtx,
bool is_retransmit) {
- CriticalSectionScoped lock(statistics_crit_.get());
StreamDataCounters* counters;
- uint32_t ssrc;
+ // Get ssrc before taking statistics_crit_ to avoid possible deadlock.
+ uint32_t ssrc = SSRC();
+
+ CriticalSectionScoped lock(statistics_crit_.get());
if (is_rtx) {
counters = &rtx_rtp_stats_;
ssrc = ssrc_rtx_;
} else {
counters = &rtp_stats_;
- ssrc = ssrc_;
}
bitrate_sent_.Update(size);
@@ -887,14 +866,14 @@ bool RTPSender::IsFecPacket(const uint8_t* buffer,
}
int RTPSender::TimeToSendPadding(int bytes) {
- if (!sending_media_) {
- return 0;
- }
int payload_type;
int64_t capture_time_ms;
uint32_t timestamp;
{
CriticalSectionScoped cs(send_critsect_);
+ if (!sending_media_) {
+ return 0;
+ }
payload_type = ((rtx_ & kRtxRedundantPayloads) > 0) ? payload_type_rtx_ :
payload_type_;
timestamp = timestamp_;
@@ -909,8 +888,13 @@ int RTPSender::TimeToSendPadding(int bytes) {
int bytes_sent = SendRedundantPayloads(payload_type, bytes);
bytes -= bytes_sent;
if (bytes > 0) {
- int padding_sent = SendPadData(payload_type, timestamp, capture_time_ms,
- bytes, kDontStore, true, true);
+ int padding_sent = SendPadData(payload_type,
+ timestamp,
+ capture_time_ms,
+ bytes,
+ kDontStore,
+ true,
+ rtx_ == kRtxOff);
bytes_sent += padding_sent;
}
return bytes_sent;
@@ -1116,9 +1100,8 @@ uint16_t RTPSender::BuildRTPHeaderExtension(uint8_t* data_buffer) const {
data_buffer + kHeaderLength + total_block_length);
break;
case kRtpExtensionAudioLevel:
- // Because AudioLevel is handled specially by RTPSenderAudio, we pretend
- // we don't have to care about it here, which is true until we wan't to
- // use it together with any of the other extensions we support.
+ block_length = BuildAudioLevelExtension(
+ data_buffer + kHeaderLength + total_block_length);
break;
case kRtpExtensionAbsoluteSendTime:
block_length = BuildAbsoluteSendTimeExtension(
@@ -1178,8 +1161,42 @@ uint8_t RTPSender::BuildTransmissionTimeOffsetExtension(
return kTransmissionTimeOffsetLength;
}
-uint8_t RTPSender::BuildAbsoluteSendTimeExtension(
- uint8_t* data_buffer) const {
+uint8_t RTPSender::BuildAudioLevelExtension(uint8_t* data_buffer) const {
+ // An RTP Header Extension for Client-to-Mixer Audio Level Indication
+ //
+ // https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
+ //
+ // The form of the audio level extension block:
+ //
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | len=0 |V| level | 0x00 | 0x00 |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ //
+ // Note that we always include 2 pad bytes, which will result in legal and
+ // correctly parsed RTP, but may be a bit wasteful if more short extensions
+ // are implemented. Right now the pad bytes would anyway be required at end
+ // of the extension block, so it makes no difference.
+
+ // Get id defined by user.
+ uint8_t id;
+ if (rtp_header_extension_map_.GetId(kRtpExtensionAudioLevel, &id) != 0) {
+ // Not registered.
+ return 0;
+ }
+ size_t pos = 0;
+ const uint8_t len = 0;
+ data_buffer[pos++] = (id << 4) + len;
+ data_buffer[pos++] = (1 << 7) + 0; // Voice, 0 dBov.
+ data_buffer[pos++] = 0; // Padding.
+ data_buffer[pos++] = 0; // Padding.
+ // kAudioLevelLength is including pad bytes.
+ assert(pos == kAudioLevelLength);
+ return kAudioLevelLength;
+}
+
+uint8_t RTPSender::BuildAbsoluteSendTimeExtension(uint8_t* data_buffer) const {
// Absolute send time in RTP streams.
//
// The absolute send time is signaled to the receiver in-band using the
@@ -1213,106 +1230,138 @@ uint8_t RTPSender::BuildAbsoluteSendTimeExtension(
return kAbsoluteSendTimeLength;
}
-bool RTPSender::UpdateTransmissionTimeOffset(
+void RTPSender::UpdateTransmissionTimeOffset(
uint8_t *rtp_packet, const uint16_t rtp_packet_length,
const RTPHeader &rtp_header, const int64_t time_diff_ms) const {
CriticalSectionScoped cs(send_critsect_);
-
+ // Get id.
+ uint8_t id = 0;
+ if (rtp_header_extension_map_.GetId(kRtpExtensionTransmissionTimeOffset,
+ &id) != 0) {
+ // Not registered.
+ return;
+ }
// Get length until start of header extension block.
int extension_block_pos =
rtp_header_extension_map_.GetLengthUntilBlockStartInBytes(
kRtpExtensionTransmissionTimeOffset);
if (extension_block_pos < 0) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update transmission time offset, not registered.");
- return false;
+ LOG(LS_WARNING)
+ << "Failed to update transmission time offset, not registered.";
+ return;
}
int block_pos = 12 + rtp_header.numCSRCs + extension_block_pos;
if (rtp_packet_length < block_pos + kTransmissionTimeOffsetLength ||
rtp_header.headerLength <
block_pos + kTransmissionTimeOffsetLength) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update transmission time offset, invalid length.");
- return false;
+ LOG(LS_WARNING)
+ << "Failed to update transmission time offset, invalid length.";
+ return;
}
// Verify that header contains extension.
if (!((rtp_packet[12 + rtp_header.numCSRCs] == 0xBE) &&
(rtp_packet[12 + rtp_header.numCSRCs + 1] == 0xDE))) {
- WEBRTC_TRACE(
- kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update transmission time offset, hdr extension not found.");
- return false;
- }
- // Get id.
- uint8_t id = 0;
- if (rtp_header_extension_map_.GetId(kRtpExtensionTransmissionTimeOffset,
- &id) != 0) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update transmission time offset, no id.");
- return false;
+ LOG(LS_WARNING) << "Failed to update transmission time offset, hdr "
+ "extension not found.";
+ return;
}
// Verify first byte in block.
const uint8_t first_block_byte = (id << 4) + 2;
if (rtp_packet[block_pos] != first_block_byte) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update transmission time offset.");
- return false;
+ LOG(LS_WARNING) << "Failed to update transmission time offset.";
+ return;
}
// Update transmission offset field (converting to a 90 kHz timestamp).
ModuleRTPUtility::AssignUWord24ToBuffer(rtp_packet + block_pos + 1,
time_diff_ms * 90); // RTP timestamp.
- return true;
}
-bool RTPSender::UpdateAbsoluteSendTime(
- uint8_t *rtp_packet, const uint16_t rtp_packet_length,
- const RTPHeader &rtp_header, const int64_t now_ms) const {
+bool RTPSender::UpdateAudioLevel(uint8_t *rtp_packet,
+ const uint16_t rtp_packet_length,
+ const RTPHeader &rtp_header,
+ const bool is_voiced,
+ const uint8_t dBov) const {
CriticalSectionScoped cs(send_critsect_);
+ // Get id.
+ uint8_t id = 0;
+ if (rtp_header_extension_map_.GetId(kRtpExtensionAudioLevel, &id) != 0) {
+ // Not registered.
+ return false;
+ }
// Get length until start of header extension block.
int extension_block_pos =
rtp_header_extension_map_.GetLengthUntilBlockStartInBytes(
- kRtpExtensionAbsoluteSendTime);
+ kRtpExtensionAudioLevel);
if (extension_block_pos < 0) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update absolute send time, not registered.");
+ // The feature is not enabled.
return false;
}
int block_pos = 12 + rtp_header.numCSRCs + extension_block_pos;
- if (rtp_packet_length < block_pos + kAbsoluteSendTimeLength ||
- rtp_header.headerLength < block_pos + kAbsoluteSendTimeLength) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update absolute send time, invalid length.");
+ if (rtp_packet_length < block_pos + kAudioLevelLength ||
+ rtp_header.headerLength < block_pos + kAudioLevelLength) {
+ LOG(LS_WARNING) << "Failed to update audio level, invalid length.";
return false;
}
// Verify that header contains extension.
if (!((rtp_packet[12 + rtp_header.numCSRCs] == 0xBE) &&
(rtp_packet[12 + rtp_header.numCSRCs + 1] == 0xDE))) {
- WEBRTC_TRACE(
- kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update absolute send time, hdr extension not found.");
+ LOG(LS_WARNING) << "Failed to update audio level, hdr extension not found.";
+ return false;
+ }
+ // Verify first byte in block.
+ const uint8_t first_block_byte = (id << 4) + 0;
+ if (rtp_packet[block_pos] != first_block_byte) {
+ LOG(LS_WARNING) << "Failed to update audio level.";
return false;
}
+ rtp_packet[block_pos + 1] = (is_voiced ? 0x80 : 0x00) + (dBov & 0x7f);
+ return true;
+}
+
+void RTPSender::UpdateAbsoluteSendTime(
+ uint8_t *rtp_packet, const uint16_t rtp_packet_length,
+ const RTPHeader &rtp_header, const int64_t now_ms) const {
+ CriticalSectionScoped cs(send_critsect_);
+
// Get id.
uint8_t id = 0;
if (rtp_header_extension_map_.GetId(kRtpExtensionAbsoluteSendTime,
&id) != 0) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update absolute send time, no id.");
- return false;
+ // Not registered.
+ return;
+ }
+ // Get length until start of header extension block.
+ int extension_block_pos =
+ rtp_header_extension_map_.GetLengthUntilBlockStartInBytes(
+ kRtpExtensionAbsoluteSendTime);
+ if (extension_block_pos < 0) {
+ // The feature is not enabled.
+ return;
+ }
+ int block_pos = 12 + rtp_header.numCSRCs + extension_block_pos;
+ if (rtp_packet_length < block_pos + kAbsoluteSendTimeLength ||
+ rtp_header.headerLength < block_pos + kAbsoluteSendTimeLength) {
+ LOG(LS_WARNING) << "Failed to update absolute send time, invalid length.";
+ return;
+ }
+ // Verify that header contains extension.
+ if (!((rtp_packet[12 + rtp_header.numCSRCs] == 0xBE) &&
+ (rtp_packet[12 + rtp_header.numCSRCs + 1] == 0xDE))) {
+ LOG(LS_WARNING)
+ << "Failed to update absolute send time, hdr extension not found.";
+ return;
}
// Verify first byte in block.
const uint8_t first_block_byte = (id << 4) + 2;
if (rtp_packet[block_pos] != first_block_byte) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_,
- "Failed to update absolute send time.");
- return false;
+ LOG(LS_WARNING) << "Failed to update absolute send time.";
+ return;
}
// Update absolute send time field (convert ms to 24-bit unsigned with 18 bit
// fractional part).
ModuleRTPUtility::AssignUWord24ToBuffer(rtp_packet + block_pos + 1,
((now_ms << 18) / 1000) & 0x00ffffff);
- return true;
}
void RTPSender::SetSendingStatus(bool enabled) {
@@ -1462,19 +1511,6 @@ int32_t RTPSender::SetAudioPacketSize(
return audio_->SetAudioPacketSize(packet_size_samples);
}
-int32_t RTPSender::SetAudioLevelIndicationStatus(const bool enable,
- const uint8_t ID) {
- if (!audio_configured_) {
- return -1;
- }
- return audio_->SetAudioLevelIndicationStatus(enable, ID);
-}
-
-int32_t RTPSender::AudioLevelIndicationStatus(bool *enable,
- uint8_t* id) const {
- return audio_->AudioLevelIndicationStatus(*enable, *id);
-}
-
int32_t RTPSender::SetAudioLevel(const uint8_t level_d_bov) {
return audio_->SetAudioLevel(level_d_bov);
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
index e1cc3a182a7..291e619b43a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender.h
@@ -25,6 +25,7 @@
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/modules/rtp_rtcp/source/ssrc_database.h"
#include "webrtc/modules/rtp_rtcp/source/video_codec_information.h"
+#include "webrtc/system_wrappers/interface/thread_annotations.h"
#define MAX_INIT_RTP_SEQ_NUMBER 32767 // 2^15 -1.
@@ -82,7 +83,8 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
// was sent within the statistics window.
bool GetSendSideDelay(int* avg_send_delay_ms, int* max_send_delay_ms) const;
- void SetTargetSendBitrate(const uint32_t bits);
+ void SetTargetBitrate(uint32_t bitrate);
+ uint32_t GetTargetBitrate();
virtual uint16_t MaxDataPayloadLength() const
OVERRIDE; // with RTP and FEC headers.
@@ -153,19 +155,15 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
uint16_t BuildRTPHeaderExtension(uint8_t* data_buffer) const;
- uint8_t BuildTransmissionTimeOffsetExtension(
- uint8_t *data_buffer) const;
- uint8_t BuildAbsoluteSendTimeExtension(
- uint8_t* data_buffer) const;
+ uint8_t BuildTransmissionTimeOffsetExtension(uint8_t *data_buffer) const;
+ uint8_t BuildAudioLevelExtension(uint8_t* data_buffer) const;
+ uint8_t BuildAbsoluteSendTimeExtension(uint8_t* data_buffer) const;
- bool UpdateTransmissionTimeOffset(uint8_t *rtp_packet,
- const uint16_t rtp_packet_length,
- const RTPHeader &rtp_header,
- const int64_t time_diff_ms) const;
- bool UpdateAbsoluteSendTime(uint8_t *rtp_packet,
- const uint16_t rtp_packet_length,
- const RTPHeader &rtp_header,
- const int64_t now_ms) const;
+ bool UpdateAudioLevel(uint8_t *rtp_packet,
+ const uint16_t rtp_packet_length,
+ const RTPHeader &rtp_header,
+ const bool is_voiced,
+ const uint8_t dBov) const;
bool TimeToSendPacket(uint16_t sequence_number, int64_t capture_time_ms,
bool retransmission);
@@ -187,10 +185,12 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
bool ProcessNACKBitRate(const uint32_t now);
// RTX.
- void SetRTXStatus(int mode, bool set_ssrc, uint32_t ssrc);
+ void SetRTXStatus(int mode);
void RTXStatus(int* mode, uint32_t* ssrc, int* payload_type) const;
+ void SetRtxSsrc(uint32_t ssrc);
+
void SetRtxPayloadType(int payloadType);
// Functions wrapping RTPSenderInterface.
@@ -228,12 +228,6 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
// packet in silence (CNG).
int32_t SetAudioPacketSize(const uint16_t packet_size_samples);
- // Set status and ID for header-extension-for-audio-level-indication.
- int32_t SetAudioLevelIndicationStatus(const bool enable, const uint8_t ID);
-
- // Get status and ID for header-extension-for-audio-level-indication.
- int32_t AudioLevelIndicationStatus(bool *enable, uint8_t *id) const;
-
// Store the audio level in d_bov for
// header-extension-for-audio-level-indication.
int32_t SetAudioLevel(const uint8_t level_d_bov);
@@ -303,7 +297,8 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
bool PrepareAndSendPacket(uint8_t* buffer,
uint16_t length,
int64_t capture_time_ms,
- bool send_over_rtx);
+ bool send_over_rtx,
+ bool is_retransmit);
int SendRedundantPayloads(int payload_type, int bytes);
@@ -319,6 +314,15 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
void UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms);
+ void UpdateTransmissionTimeOffset(uint8_t *rtp_packet,
+ const uint16_t rtp_packet_length,
+ const RTPHeader &rtp_header,
+ const int64_t time_diff_ms) const;
+ void UpdateAbsoluteSendTime(uint8_t *rtp_packet,
+ const uint16_t rtp_packet_length,
+ const RTPHeader &rtp_header,
+ const int64_t now_ms) const;
+
void UpdateRtpStats(const uint8_t* buffer,
uint32_t size,
const RTPHeader& header,
@@ -338,13 +342,12 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
CriticalSectionWrapper *send_critsect_;
Transport *transport_;
- bool sending_media_;
+ bool sending_media_ GUARDED_BY(send_critsect_);
uint16_t max_payload_length_;
- uint16_t target_send_bitrate_;
uint16_t packet_over_head_;
- int8_t payload_type_;
+ int8_t payload_type_ GUARDED_BY(send_critsect_);
std::map<int8_t, ModuleRTPUtility::Payload *> payload_type_map_;
RtpHeaderExtensionMap rtp_header_extension_map_;
@@ -388,6 +391,13 @@ class RTPSender : public RTPSenderInterface, public Bitrate::Observer {
int rtx_;
uint32_t ssrc_rtx_;
int payload_type_rtx_;
+
+ // Note: Don't access this variable directly, always go through
+ // SetTargetBitrateKbps or GetTargetBitrateKbps. Also remember
+ // that by the time the function returns there is no guarantee
+ // that the target bitrate is still valid.
+ scoped_ptr<CriticalSectionWrapper> target_bitrate_critsect_;
+ uint32_t target_bitrate_ GUARDED_BY(target_bitrate_critsect_);
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
index f800142886d..6b3e2276ee6 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -17,7 +17,7 @@
namespace webrtc {
RTPSenderAudio::RTPSenderAudio(const int32_t id, Clock* clock,
- RTPSenderInterface* rtpSender) :
+ RTPSender* rtpSender) :
_id(id),
_clock(clock),
_rtpSender(rtpSender),
@@ -42,8 +42,6 @@ RTPSenderAudio::RTPSenderAudio(const int32_t id, Clock* clock,
_cngSWBPayloadType(-1),
_cngFBPayloadType(-1),
_lastPayloadType(-1),
- _includeAudioLevelIndication(false), // @TODO - reset at Init()?
- _audioLevelIndicationID(0),
_audioLevel_dBov(0) {
};
@@ -365,52 +363,12 @@ int32_t RTPSenderAudio::SendAudio(
if (rtpHeaderLength <= 0) {
return -1;
}
+ if (maxPayloadLength < (rtpHeaderLength + payloadSize)) {
+ // Too large payload buffer.
+ return -1;
+ }
{
CriticalSectionScoped cs(_sendAudioCritsect);
-
- // https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
- if (_includeAudioLevelIndication) {
- dataBuffer[0] |= 0x10; // set eXtension bit
- /*
- 0 1 2 3
- 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- | 0xBE | 0xDE | length=1 |
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- | ID | len=0 |V| level | 0x00 | 0x00 |
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- // add our ID (0xBEDE)
- ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength,
- RTP_AUDIO_LEVEL_UNIQUE_ID);
- rtpHeaderLength += 2;
-
- // add the length (length=1) in number of word32
- const uint8_t length = 1;
- ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength,
- length);
- rtpHeaderLength += 2;
-
- // add ID (defined by the user) and len(=0) byte
- const uint8_t id = _audioLevelIndicationID;
- const uint8_t len = 0;
- dataBuffer[rtpHeaderLength++] = (id << 4) + len;
-
- // add voice-activity flag (V) bit and the audio level (in dBov)
- const uint8_t V = (frameType == kAudioFrameSpeech);
- uint8_t level = _audioLevel_dBov;
- dataBuffer[rtpHeaderLength++] = (V << 7) + level;
-
- // add two bytes zero padding
- ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength, 0);
- rtpHeaderLength += 2;
- }
-
- if(maxPayloadLength < rtpHeaderLength + payloadSize ) {
- // too large payload buffer
- return -1;
- }
-
if (_REDPayloadType >= 0 && // Have we configured RED?
fragmentation &&
fragmentation->fragmentationVectorSize > 1 &&
@@ -474,6 +432,17 @@ int32_t RTPSenderAudio::SendAudio(
}
}
_lastPayloadType = payloadType;
+
+ // Update audio level extension, if included.
+ {
+ uint16_t packetSize = payloadSize + rtpHeaderLength;
+ ModuleRTPUtility::RTPHeaderParser rtp_parser(dataBuffer, packetSize);
+ RTPHeader rtp_header;
+ rtp_parser.Parse(rtp_header);
+ _rtpSender->UpdateAudioLevel(dataBuffer, packetSize, rtp_header,
+ (frameType == kAudioFrameSpeech),
+ _audioLevel_dBov);
+ }
} // end critical section
TRACE_EVENT_ASYNC_END2("webrtc", "Audio", captureTimeStamp,
"timestamp", _rtpSender->Timestamp(),
@@ -486,32 +455,6 @@ int32_t RTPSenderAudio::SendAudio(
PacedSender::kHighPriority);
}
-int32_t
-RTPSenderAudio::SetAudioLevelIndicationStatus(const bool enable,
- const uint8_t ID)
-{
- if(enable && (ID < 1 || ID > 14))
- {
- return -1;
- }
- CriticalSectionScoped cs(_sendAudioCritsect);
-
- _includeAudioLevelIndication = enable;
- _audioLevelIndicationID = ID;
-
- return 0;
-}
-
-int32_t
-RTPSenderAudio::AudioLevelIndicationStatus(bool& enable,
- uint8_t& ID) const
-{
- CriticalSectionScoped cs(_sendAudioCritsect);
- enable = _includeAudioLevelIndication;
- ID = _audioLevelIndicationID;
- return 0;
-}
-
// Audio level magnitude and voice activity flag are set for each RTP packet
int32_t
RTPSenderAudio::SetAudioLevel(const uint8_t level_dBov)
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
index 7074e7b29ad..732199c17a1 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -23,7 +23,7 @@ class RTPSenderAudio: public DTMFqueue
{
public:
RTPSenderAudio(const int32_t id, Clock* clock,
- RTPSenderInterface* rtpSender);
+ RTPSender* rtpSender);
virtual ~RTPSenderAudio();
int32_t RegisterAudioPayload(
@@ -44,13 +44,6 @@ public:
// set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
int32_t SetAudioPacketSize(const uint16_t packetSizeSamples);
- // Set status and ID for header-extension-for-audio-level-indication.
- // Valid ID range is [1,14].
- int32_t SetAudioLevelIndicationStatus(const bool enable, const uint8_t ID);
-
- // Get status and ID for header-extension-for-audio-level-indication.
- int32_t AudioLevelIndicationStatus(bool& enable, uint8_t& ID) const;
-
// Store the audio level in dBov for header-extension-for-audio-level-indication.
// Valid range is [0,100]. Actual value is negative.
int32_t SetAudioLevel(const uint8_t level_dBov);
@@ -86,7 +79,7 @@ protected:
private:
int32_t _id;
Clock* _clock;
- RTPSenderInterface* _rtpSender;
+ RTPSender* _rtpSender;
CriticalSectionWrapper* _audioFeedbackCritsect;
RtpAudioFeedback* _audioFeedback;
@@ -117,8 +110,6 @@ private:
int8_t _lastPayloadType;
// Audio level indication (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/)
- bool _includeAudioLevelIndication;
- uint8_t _audioLevelIndicationID;
uint8_t _audioLevel_dBov;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
index ce615be0494..18482890f7a 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -160,11 +160,8 @@ TEST_F(RtpSenderTest, RegisterRtpAudioLevelHeaderExtension) {
EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAudioLevel, kAudioLevelExtensionId));
- // Accounted size for audio level is zero because it is currently specially
- // treated by RTPSenderAudio.
- EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
- // EXPECT_EQ(kRtpOneByteHeaderLength + kAudioLevelLength,
- // rtp_sender_->RtpHeaderExtensionTotalLength());
+ EXPECT_EQ(kRtpOneByteHeaderLength + kAudioLevelLength,
+ rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
kRtpExtensionAudioLevel));
EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
@@ -183,14 +180,16 @@ TEST_F(RtpSenderTest, RegisterRtpHeaderExtensions) {
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAudioLevel, kAudioLevelExtensionId));
EXPECT_EQ(kRtpOneByteHeaderLength + kTransmissionTimeOffsetLength +
- kAbsoluteSendTimeLength, rtp_sender_->RtpHeaderExtensionTotalLength());
+ kAbsoluteSendTimeLength + kAudioLevelLength,
+ rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset));
- EXPECT_EQ(kRtpOneByteHeaderLength + kAbsoluteSendTimeLength,
- rtp_sender_->RtpHeaderExtensionTotalLength());
+ EXPECT_EQ(kRtpOneByteHeaderLength + kAbsoluteSendTimeLength +
+ kAudioLevelLength, rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime));
- EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
+ EXPECT_EQ(kRtpOneByteHeaderLength + kAudioLevelLength,
+ rtp_sender_->RtpHeaderExtensionTotalLength());
EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(
kRtpExtensionAudioLevel));
EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
@@ -202,23 +201,24 @@ TEST_F(RtpSenderTest, BuildRTPPacket) {
kMarkerBit,
kTimestamp,
0);
- EXPECT_EQ(12, length);
+ EXPECT_EQ(kRtpHeaderSize, length);
// Verify
webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
- RtpHeaderExtensionMap map;
- map.Register(kRtpExtensionTransmissionTimeOffset,
- kTransmissionTimeOffsetExtensionId);
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+ const bool valid_rtp_header = rtp_parser.Parse(rtp_header, NULL);
ASSERT_TRUE(valid_rtp_header);
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header);
EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_FALSE(rtp_header.extension.hasTransmissionTimeOffset);
+ EXPECT_FALSE(rtp_header.extension.hasAbsoluteSendTime);
+ EXPECT_FALSE(rtp_header.extension.hasAudioLevel);
EXPECT_EQ(0, rtp_header.extension.transmissionTimeOffset);
EXPECT_EQ(0u, rtp_header.extension.absoluteSendTime);
+ EXPECT_EQ(0u, rtp_header.extension.audioLevel);
}
TEST_F(RtpSenderTest, BuildRTPPacketWithTransmissionOffsetExtension) {
@@ -231,7 +231,8 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithTransmissionOffsetExtension) {
kMarkerBit,
kTimestamp,
0);
- EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
+ EXPECT_EQ(kRtpHeaderSize + rtp_sender_->RtpHeaderExtensionTotalLength(),
+ length);
// Verify
webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
@@ -246,6 +247,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithTransmissionOffsetExtension) {
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header);
EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset);
EXPECT_EQ(kTimeOffset, rtp_header.extension.transmissionTimeOffset);
// Parse without map extension
@@ -255,6 +257,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithTransmissionOffsetExtension) {
ASSERT_TRUE(valid_rtp_header2);
VerifyRTPHeaderCommon(rtp_header2);
EXPECT_EQ(length, rtp_header2.headerLength);
+ EXPECT_FALSE(rtp_header2.extension.hasTransmissionTimeOffset);
EXPECT_EQ(0, rtp_header2.extension.transmissionTimeOffset);
}
@@ -269,7 +272,8 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithNegativeTransmissionOffsetExtension) {
kMarkerBit,
kTimestamp,
0);
- EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
+ EXPECT_EQ(kRtpHeaderSize + rtp_sender_->RtpHeaderExtensionTotalLength(),
+ length);
// Verify
webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
@@ -284,6 +288,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithNegativeTransmissionOffsetExtension) {
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header);
EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset);
EXPECT_EQ(kNegTimeOffset, rtp_header.extension.transmissionTimeOffset);
}
@@ -297,7 +302,8 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithAbsoluteSendTimeExtension) {
kMarkerBit,
kTimestamp,
0);
- EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
+ EXPECT_EQ(kRtpHeaderSize + rtp_sender_->RtpHeaderExtensionTotalLength(),
+ length);
// Verify
webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
@@ -311,6 +317,7 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithAbsoluteSendTimeExtension) {
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header);
EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime);
EXPECT_EQ(kAbsoluteSendTime, rtp_header.extension.absoluteSendTime);
// Parse without map extension
@@ -320,9 +327,54 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithAbsoluteSendTimeExtension) {
ASSERT_TRUE(valid_rtp_header2);
VerifyRTPHeaderCommon(rtp_header2);
EXPECT_EQ(length, rtp_header2.headerLength);
+ EXPECT_FALSE(rtp_header2.extension.hasAbsoluteSendTime);
EXPECT_EQ(0u, rtp_header2.extension.absoluteSendTime);
}
+TEST_F(RtpSenderTest, BuildRTPPacketWithAudioLevelExtension) {
+ EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
+ kRtpExtensionAudioLevel, kAudioLevelExtensionId));
+
+ int32_t length = rtp_sender_->BuildRTPheader(packet_,
+ kPayload,
+ kMarkerBit,
+ kTimestamp,
+ 0);
+ EXPECT_EQ(kRtpHeaderSize + rtp_sender_->RtpHeaderExtensionTotalLength(),
+ length);
+
+ // Verify
+ webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
+ webrtc::RTPHeader rtp_header;
+
+ // Updating audio level is done in RTPSenderAudio, so simulate it here.
+ rtp_parser.Parse(rtp_header);
+ rtp_sender_->UpdateAudioLevel(packet_, length, rtp_header, true, kAudioLevel);
+
+ RtpHeaderExtensionMap map;
+ map.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
+ const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+
+ ASSERT_TRUE(valid_rtp_header);
+ ASSERT_FALSE(rtp_parser.RTCP());
+ VerifyRTPHeaderCommon(rtp_header);
+ EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_TRUE(rtp_header.extension.hasAudioLevel);
+ // Expect kAudioLevel + 0x80 because we set "voiced" to true in the call to
+ // UpdateAudioLevel(), above.
+ EXPECT_EQ(kAudioLevel + 0x80u, rtp_header.extension.audioLevel);
+
+ // Parse without map extension
+ webrtc::RTPHeader rtp_header2;
+ const bool valid_rtp_header2 = rtp_parser.Parse(rtp_header2, NULL);
+
+ ASSERT_TRUE(valid_rtp_header2);
+ VerifyRTPHeaderCommon(rtp_header2);
+ EXPECT_EQ(length, rtp_header2.headerLength);
+ EXPECT_FALSE(rtp_header2.extension.hasAudioLevel);
+ EXPECT_EQ(0u, rtp_header2.extension.audioLevel);
+}
+
TEST_F(RtpSenderTest, BuildRTPPacketWithHeaderExtensions) {
EXPECT_EQ(0, rtp_sender_->SetTransmissionTimeOffset(kTimeOffset));
EXPECT_EQ(0, rtp_sender_->SetAbsoluteSendTime(kAbsoluteSendTime));
@@ -330,30 +382,42 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithHeaderExtensions) {
kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
+ EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
+ kRtpExtensionAudioLevel, kAudioLevelExtensionId));
int32_t length = rtp_sender_->BuildRTPheader(packet_,
kPayload,
kMarkerBit,
kTimestamp,
0);
- EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
+ EXPECT_EQ(kRtpHeaderSize + rtp_sender_->RtpHeaderExtensionTotalLength(),
+ length);
// Verify
webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
webrtc::RTPHeader rtp_header;
+ // Updating audio level is done in RTPSenderAudio, so simulate it here.
+ rtp_parser.Parse(rtp_header);
+ rtp_sender_->UpdateAudioLevel(packet_, length, rtp_header, true, kAudioLevel);
+
RtpHeaderExtensionMap map;
map.Register(kRtpExtensionTransmissionTimeOffset,
kTransmissionTimeOffsetExtensionId);
map.Register(kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId);
+ map.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
ASSERT_TRUE(valid_rtp_header);
ASSERT_FALSE(rtp_parser.RTCP());
VerifyRTPHeaderCommon(rtp_header);
EXPECT_EQ(length, rtp_header.headerLength);
+ EXPECT_TRUE(rtp_header.extension.hasTransmissionTimeOffset);
+ EXPECT_TRUE(rtp_header.extension.hasAbsoluteSendTime);
+ EXPECT_TRUE(rtp_header.extension.hasAudioLevel);
EXPECT_EQ(kTimeOffset, rtp_header.extension.transmissionTimeOffset);
EXPECT_EQ(kAbsoluteSendTime, rtp_header.extension.absoluteSendTime);
+ EXPECT_EQ(kAudioLevel + 0x80u, rtp_header.extension.audioLevel);
// Parse without map extension
webrtc::RTPHeader rtp_header2;
@@ -362,8 +426,12 @@ TEST_F(RtpSenderTest, BuildRTPPacketWithHeaderExtensions) {
ASSERT_TRUE(valid_rtp_header2);
VerifyRTPHeaderCommon(rtp_header2);
EXPECT_EQ(length, rtp_header2.headerLength);
+ EXPECT_FALSE(rtp_header2.extension.hasTransmissionTimeOffset);
+ EXPECT_FALSE(rtp_header2.extension.hasAbsoluteSendTime);
+ EXPECT_FALSE(rtp_header2.extension.hasAudioLevel);
EXPECT_EQ(0, rtp_header2.extension.transmissionTimeOffset);
EXPECT_EQ(0u, rtp_header2.extension.absoluteSendTime);
+ EXPECT_EQ(0u, rtp_header2.extension.audioLevel);
}
TEST_F(RtpSenderTest, TrafficSmoothingWithExtensions) {
@@ -376,7 +444,7 @@ TEST_F(RtpSenderTest, TrafficSmoothingWithExtensions) {
kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
- rtp_sender_->SetTargetSendBitrate(300000);
+ rtp_sender_->SetTargetBitrate(300000);
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
int32_t rtp_length = rtp_sender_->BuildRTPheader(packet_,
kPayload,
@@ -430,7 +498,7 @@ TEST_F(RtpSenderTest, TrafficSmoothingRetransmits) {
kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
- rtp_sender_->SetTargetSendBitrate(300000);
+ rtp_sender_->SetTargetBitrate(300000);
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
int32_t rtp_length = rtp_sender_->BuildRTPheader(packet_,
kPayload,
@@ -493,7 +561,7 @@ TEST_F(RtpSenderTest, SendPadding) {
uint16_t seq_num = kSeqNum;
uint32_t timestamp = kTimestamp;
rtp_sender_->SetStorePacketsStatus(true, 10);
- int rtp_header_len = 12;
+ int32_t rtp_header_len = kRtpHeaderSize;
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset, kTransmissionTimeOffsetExtensionId));
rtp_header_len += 4; // 4 bytes extension.
@@ -512,7 +580,7 @@ TEST_F(RtpSenderTest, SendPadding) {
kAbsoluteSendTimeExtensionId);
webrtc::RTPHeader rtp_header;
- rtp_sender_->SetTargetSendBitrate(300000);
+ rtp_sender_->SetTargetBitrate(300000);
int64_t capture_time_ms = fake_clock_.TimeInMilliseconds();
int32_t rtp_length = rtp_sender_->BuildRTPheader(packet_,
kPayload,
@@ -613,14 +681,14 @@ TEST_F(RtpSenderTest, SendRedundantPayloads) {
uint16_t seq_num = kSeqNum;
rtp_sender_->SetStorePacketsStatus(true, 10);
- int rtp_header_len = 12;
+ int32_t rtp_header_len = kRtpHeaderSize;
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAbsoluteSendTime, kAbsoluteSendTimeExtensionId));
rtp_header_len += 4; // 4 bytes extension.
rtp_header_len += 4; // 4 extra bytes common to all extension headers.
- rtp_sender_->SetRTXStatus(kRtxRetransmitted | kRtxRedundantPayloads, true,
- 1234);
+ rtp_sender_->SetRTXStatus(kRtxRetransmitted | kRtxRedundantPayloads);
+ rtp_sender_->SetRtxSsrc(1234);
// Create and set up parser.
scoped_ptr<webrtc::RtpHeaderParser> rtp_parser(
@@ -630,7 +698,7 @@ TEST_F(RtpSenderTest, SendRedundantPayloads) {
kTransmissionTimeOffsetExtensionId);
rtp_parser->RegisterRtpHeaderExtension(kRtpExtensionAbsoluteSendTime,
kAbsoluteSendTimeExtensionId);
- rtp_sender_->SetTargetSendBitrate(300000);
+ rtp_sender_->SetTargetBitrate(300000);
const size_t kNumPayloadSizes = 10;
const int kPayloadSizes[kNumPayloadSizes] = {500, 550, 600, 650, 700, 750,
800, 850, 900, 950};
@@ -939,48 +1007,6 @@ TEST_F(RtpSenderTest, StreamDataCountersCallbacks) {
rtp_sender_->RegisterRtpStatisticsCallback(NULL);
}
-TEST_F(RtpSenderAudioTest, BuildRTPPacketWithAudioLevelExtension) {
- EXPECT_EQ(0, rtp_sender_->SetAudioLevelIndicationStatus(true,
- kAudioLevelExtensionId));
- EXPECT_EQ(0, rtp_sender_->SetAudioLevel(kAudioLevel));
- EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
- kRtpExtensionAudioLevel, kAudioLevelExtensionId));
-
- int32_t length = rtp_sender_->BuildRTPheader(packet_,
- kAudioPayload,
- kMarkerBit,
- kTimestamp,
- 0);
- EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
-
- // Currently, no space is added by for header extension by BuildRTPHeader().
- EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
-
- // Verify
- webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
- webrtc::RTPHeader rtp_header;
-
- RtpHeaderExtensionMap map;
- map.Register(kRtpExtensionAudioLevel, kAudioLevelExtensionId);
- const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
-
- ASSERT_TRUE(valid_rtp_header);
- ASSERT_FALSE(rtp_parser.RTCP());
- VerifyRTPHeaderCommon(rtp_header);
- EXPECT_EQ(length, rtp_header.headerLength);
- // TODO(solenberg): Should verify that we got audio level in header extension.
-
- // Parse without map extension
- webrtc::RTPHeader rtp_header2;
- const bool valid_rtp_header2 = rtp_parser.Parse(rtp_header2, NULL);
-
- ASSERT_TRUE(valid_rtp_header2);
- VerifyRTPHeaderCommon(rtp_header2);
- EXPECT_EQ(length, rtp_header2.headerLength);
- // TODO(solenberg): Should verify that we didn't get audio level.
- EXPECT_EQ(0, rtp_sender_->SetAudioLevelIndicationStatus(false, 0));
-}
-
TEST_F(RtpSenderAudioTest, SendAudio) {
char payload_name[RTP_PAYLOAD_NAME_SIZE] = "PAYLOAD_NAME";
const uint8_t payload_type = 127;
@@ -1007,8 +1033,6 @@ TEST_F(RtpSenderAudioTest, SendAudio) {
}
TEST_F(RtpSenderAudioTest, SendAudioWithAudioLevelExtension) {
- EXPECT_EQ(0, rtp_sender_->SetAudioLevelIndicationStatus(true,
- kAudioLevelExtensionId));
EXPECT_EQ(0, rtp_sender_->SetAudioLevel(kAudioLevel));
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(
kRtpExtensionAudioLevel, kAudioLevelExtensionId));
@@ -1044,7 +1068,6 @@ TEST_F(RtpSenderAudioTest, SendAudioWithAudioLevelExtension) {
EXPECT_EQ(0, memcmp(extension, payload_data - sizeof(extension),
sizeof(extension)));
- EXPECT_EQ(0, rtp_sender_->SetAudioLevelIndicationStatus(false, 0));
}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
index 7b36f7cced8..5d8ae166565 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -14,12 +14,13 @@
#include <stdlib.h>
#include <string.h>
+#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/producer_fec.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -30,11 +31,9 @@ struct RtpPacket {
ForwardErrorCorrection::Packet* pkt;
};
-RTPSenderVideo::RTPSenderVideo(const int32_t id,
- Clock* clock,
+RTPSenderVideo::RTPSenderVideo(Clock* clock,
RTPSenderInterface* rtpSender)
- : _id(id),
- _rtpSender(*rtpSender),
+ : _rtpSender(*rtpSender),
_sendVideoCritsect(CriticalSectionWrapper::CreateCriticalSection()),
_videoType(kRtpVideoGeneric),
_videoCodecInformation(NULL),
@@ -42,7 +41,7 @@ RTPSenderVideo::RTPSenderVideo(const int32_t id,
_retransmissionSettings(kRetransmitBaseLayer),
// Generic FEC
- _fec(id),
+ _fec(),
_fecEnabled(false),
_payloadTypeRED(-1),
_payloadTypeFEC(-1),
@@ -253,8 +252,13 @@ RTPSenderVideo::FECPacketOverhead() const
{
if (_fecEnabled)
{
- return ForwardErrorCorrection::PacketOverhead() +
- REDForFECHeaderLength;
+ // Overhead is FEC headers plus RED for FEC header plus anything in RTP
+ // header beyond the 12 bytes base header (CSRC list, extensions...)
+ // This reason for the header extensions to be included here is that
+ // from an FEC viewpoint, they are part of the payload to be protected.
+ // (The base RTP header is already protected by the FEC header.)
+ return ForwardErrorCorrection::PacketOverhead() + REDForFECHeaderLength +
+ (_rtpSender.RTPHeaderLength() - kRtpHeaderSize);
}
return 0;
}
@@ -323,8 +327,6 @@ RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
{
return retVal;
}
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "%s(timestamp:%u)",
- __FUNCTION__, captureTimeStamp);
return 0;
}
@@ -470,9 +472,9 @@ RTPSenderVideo::SendVP8(const FrameType frameType,
rtpHeaderLength, captureTimeStamp,
capture_time_ms, storage, protect))
{
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "RTPSenderVideo::SendVP8 failed to send packet number"
- " %d", _rtpSender.SequenceNumber());
+ LOG(LS_WARNING)
+ << "RTPSenderVideo::SendVP8 failed to send packet number "
+ << _rtpSender.SequenceNumber();
}
}
TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time_ms,
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
index 4c406d75014..daa730e8c24 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -31,7 +31,7 @@ struct RtpPacket;
class RTPSenderVideo
{
public:
- RTPSenderVideo(const int32_t id, Clock* clock,
+ RTPSenderVideo(Clock* clock,
RTPSenderInterface* rtpSender);
virtual ~RTPSenderVideo();
@@ -112,7 +112,6 @@ private:
const RTPVideoTypeHeader* rtpTypeHdr);
private:
- int32_t _id;
RTPSenderInterface& _rtpSender;
CriticalSectionWrapper* _sendVideoCritsect;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
index 102ebecb02e..c1f3c642749 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
@@ -30,7 +30,7 @@
#endif
#include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
#define DEBUG_PRINT(...) \
@@ -398,6 +398,10 @@ bool RTPHeaderParser::Parse(RTPHeader& header,
header.extension.hasAbsoluteSendTime = false;
header.extension.absoluteSendTime = 0;
+ // May not be present in packet.
+ header.extension.hasAudioLevel = false;
+ header.extension.audioLevel = 0;
+
if (X) {
/* RTP header extension, RFC 3550.
0 1 2 3
@@ -453,89 +457,96 @@ void RTPHeaderParser::ParseOneByteExtensionHeader(
// | ID | len |
// +-+-+-+-+-+-+-+-+
+ // Note that 'len' is the header extension element length, which is the
+ // number of bytes - 1.
const uint8_t id = (*ptr & 0xf0) >> 4;
const uint8_t len = (*ptr & 0x0f);
ptr++;
if (id == 15) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "Ext id: 15 encountered, parsing terminated.");
+ LOG(LS_WARNING)
+ << "RTP extension header 15 encountered. Terminate parsing.";
return;
}
RTPExtensionType type;
if (ptrExtensionMap->GetType(id, &type) != 0) {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "Failed to find extension id: %d", id);
- return;
- }
-
- switch (type) {
- case kRtpExtensionTransmissionTimeOffset: {
- if (len != 2) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "Incorrect transmission time offset len: %d", len);
- return;
+ // If we encounter an unknown extension, just skip over it.
+ LOG(LS_WARNING) << "Failed to find extension id: "
+ << static_cast<int>(id);
+ } else {
+ switch (type) {
+ case kRtpExtensionTransmissionTimeOffset: {
+ if (len != 2) {
+ LOG(LS_WARNING) << "Incorrect transmission time offset len: "
+ << len;
+ return;
+ }
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | len=2 | transmission offset |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ int32_t transmissionTimeOffset = ptr[0] << 16;
+ transmissionTimeOffset += ptr[1] << 8;
+ transmissionTimeOffset += ptr[2];
+ header.extension.transmissionTimeOffset =
+ transmissionTimeOffset;
+ if (transmissionTimeOffset & 0x800000) {
+ // Negative offset, correct sign for Word24 to Word32.
+ header.extension.transmissionTimeOffset |= 0xFF000000;
+ }
+ header.extension.hasTransmissionTimeOffset = true;
+ break;
}
- // 0 1 2 3
- // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- // | ID | len=2 | transmission offset |
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
- int32_t transmissionTimeOffset = *ptr++ << 16;
- transmissionTimeOffset += *ptr++ << 8;
- transmissionTimeOffset += *ptr++;
- header.extension.transmissionTimeOffset =
- transmissionTimeOffset;
- if (transmissionTimeOffset & 0x800000) {
- // Negative offset, correct sign for Word24 to Word32.
- header.extension.transmissionTimeOffset |= 0xFF000000;
+ case kRtpExtensionAudioLevel: {
+ if (len != 0) {
+ LOG(LS_WARNING) << "Incorrect audio level len: " << len;
+ return;
+ }
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | len=0 |V| level | 0x00 | 0x00 |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ //
+
+ // Parse out the fields but only use it for debugging for now.
+ // const uint8_t V = (*ptr & 0x80) >> 7;
+ // const uint8_t level = (*ptr & 0x7f);
+ // DEBUG_PRINT("RTP_AUDIO_LEVEL_UNIQUE_ID: ID=%u, len=%u, V=%u,
+ // level=%u", ID, len, V, level);
+
+ header.extension.audioLevel = ptr[0];
+ header.extension.hasAudioLevel = true;
+ break;
}
- header.extension.hasTransmissionTimeOffset = true;
- break;
- }
- case kRtpExtensionAudioLevel: {
- // --- Only used for debugging ---
- // 0 1 2 3
- // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- // | ID | len=0 |V| level | 0x00 | 0x00 |
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- //
-
- // Parse out the fields but only use it for debugging for now.
- // const uint8_t V = (*ptr & 0x80) >> 7;
- // const uint8_t level = (*ptr & 0x7f);
- // DEBUG_PRINT("RTP_AUDIO_LEVEL_UNIQUE_ID: ID=%u, len=%u, V=%u,
- // level=%u", ID, len, V, level);
- break;
- }
- case kRtpExtensionAbsoluteSendTime: {
- if (len != 2) {
- WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
- "Incorrect absolute send time len: %d", len);
+ case kRtpExtensionAbsoluteSendTime: {
+ if (len != 2) {
+ LOG(LS_WARNING) << "Incorrect absolute send time len: " << len;
+ return;
+ }
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | len=2 | absolute send time |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ uint32_t absoluteSendTime = ptr[0] << 16;
+ absoluteSendTime += ptr[1] << 8;
+ absoluteSendTime += ptr[2];
+ header.extension.absoluteSendTime = absoluteSendTime;
+ header.extension.hasAbsoluteSendTime = true;
+ break;
+ }
+ default: {
+ LOG(LS_WARNING) << "Extension type not implemented: " << type;
return;
}
- // 0 1 2 3
- // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- // | ID | len=2 | absolute send time |
- // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
-
- uint32_t absoluteSendTime = *ptr++ << 16;
- absoluteSendTime += *ptr++ << 8;
- absoluteSendTime += *ptr++;
- header.extension.absoluteSendTime = absoluteSendTime;
- header.extension.hasAbsoluteSendTime = true;
- break;
- }
- default: {
- WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
- "Extension type not implemented.");
- return;
}
}
+ ptr += (len + 1);
uint8_t num_bytes = ParsePaddingBytes(ptrRTPDataExtensionEnd, ptr);
ptr += num_bytes;
}
@@ -556,17 +567,12 @@ uint8_t RTPHeaderParser::ParsePaddingBytes(
return num_zero_bytes;
}
-// RTP payload parser
RTPPayloadParser::RTPPayloadParser(const RtpVideoCodecTypes videoType,
const uint8_t* payloadData,
- uint16_t payloadDataLength,
- int32_t id)
- :
- _id(id),
- _dataPtr(payloadData),
- _dataLength(payloadDataLength),
- _videoType(videoType) {
-}
+ uint16_t payloadDataLength)
+ : _dataPtr(payloadData),
+ _dataLength(payloadDataLength),
+ _videoType(videoType) {}
RTPPayloadParser::~RTPPayloadParser() {
}
@@ -641,8 +647,7 @@ bool RTPPayloadParser::ParseVP8(RTPPayload& parsedPacket) const {
}
if (dataLength <= 0) {
- WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
- "Error parsing VP8 payload descriptor; payload too short");
+ LOG(LS_ERROR) << "Error parsing VP8 payload descriptor!";
return false;
}
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
index 8002273c374..732301f6fbe 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility.h
@@ -166,8 +166,8 @@ namespace ModuleRTPUtility
public:
RTPPayloadParser(const RtpVideoCodecTypes payloadType,
const uint8_t* payloadData,
- const uint16_t payloadDataLength, // Length w/o padding.
- const int32_t id);
+ // Length w/o padding.
+ const uint16_t payloadDataLength);
~RTPPayloadParser();
@@ -202,7 +202,6 @@ namespace ModuleRTPUtility
int dataLength) const;
private:
- int32_t _id;
const uint8_t* _dataPtr;
const uint16_t _dataLength;
const RtpVideoCodecTypes _videoType;
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility_unittest.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility_unittest.cc
index 02a89fc4fd0..d33eaf4c849 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility_unittest.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/rtp_utility_unittest.cc
@@ -76,7 +76,7 @@ TEST(ParseVP8Test, BasicHeader) {
payload[0] = 0x14; // Binary 0001 0100; S = 1, PartID = 4.
payload[1] = 0x01; // P frame.
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -97,7 +97,7 @@ TEST(ParseVP8Test, PictureID) {
payload[1] = 0x80;
payload[2] = 17;
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -117,7 +117,7 @@ TEST(ParseVP8Test, PictureID) {
// Re-use payload, but change to long PictureID.
payload[2] = 0x80 | 17;
payload[3] = 17;
- RTPPayloadParser rtpPayloadParser2(kRtpVideoVp8, payload, 10, 0);
+ RTPPayloadParser rtpPayloadParser2(kRtpVideoVp8, payload, 10);
ASSERT_TRUE(rtpPayloadParser2.Parse(parsedPacket));
@@ -136,7 +136,7 @@ TEST(ParseVP8Test, Tl0PicIdx) {
payload[1] = 0x40;
payload[2] = 17;
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 13, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 13);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -159,7 +159,7 @@ TEST(ParseVP8Test, TIDAndLayerSync) {
payload[1] = 0x20;
payload[2] = 0x80; // TID(2) + LayerSync(false)
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -183,7 +183,7 @@ TEST(ParseVP8Test, KeyIdx) {
payload[1] = 0x10; // K = 1.
payload[2] = 0x11; // KEYIDX = 17 decimal.
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -209,7 +209,7 @@ TEST(ParseVP8Test, MultipleExtensions) {
payload[4] = 42; // Tl0PicIdx.
payload[5] = 0x40 | 0x20 | 0x11; // TID(1) + LayerSync(true) + KEYIDX(17).
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 10);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
@@ -236,7 +236,7 @@ TEST(ParseVP8Test, TooShortHeader) {
payload[2] = 0x80 | 17; // ... but only 2 bytes PictureID is provided.
payload[3] = 17; // PictureID, low 8 bits.
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, payload, 4);
RTPPayload parsedPacket;
EXPECT_FALSE(rtpPayloadParser.Parse(parsedPacket));
@@ -258,7 +258,7 @@ TEST(ParseVP8Test, TestWithPacketizer) {
ASSERT_EQ(0, packetizer.NextPacket(packet, &send_bytes, &last));
ASSERT_TRUE(last);
- RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, packet, send_bytes, 0);
+ RTPPayloadParser rtpPayloadParser(kRtpVideoVp8, packet, send_bytes);
RTPPayload parsedPacket;
ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc b/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
index 1e57970c0d0..df09b01bdf2 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/ssrc_database.cc
@@ -14,7 +14,6 @@
#include <stdlib.h>
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#ifdef _WIN32
#include <windows.h>
@@ -185,8 +184,6 @@ SSRCDatabase::SSRCDatabase()
_ssrcVector = new uint32_t[10];
#endif
_critSect = CriticalSectionWrapper::CreateCriticalSection();
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, -1, "%s created", __FUNCTION__);
}
SSRCDatabase::~SSRCDatabase()
@@ -197,8 +194,6 @@ SSRCDatabase::~SSRCDatabase()
_ssrcMap.clear();
#endif
delete _critSect;
-
- WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, -1, "%s deleted", __FUNCTION__);
}
uint32_t SSRCDatabase::GenerateRandom()
diff --git a/chromium/third_party/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h b/chromium/third_party/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h
index 2ca5f287afa..cbb1207b8ec 100644
--- a/chromium/third_party/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h
+++ b/chromium/third_party/webrtc/modules/rtp_rtcp/source/vp8_partition_aggregator.h
@@ -13,8 +13,8 @@
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module_common_types.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/typedefs.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/utility/interface/mock/mock_process_thread.h b/chromium/third_party/webrtc/modules/utility/interface/mock/mock_process_thread.h
new file mode 100644
index 00000000000..fc0c1fb1ce1
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/utility/interface/mock/mock_process_thread.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
+
+#include "webrtc/modules/utility/interface/process_thread.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockProcessThread : public ProcessThread {
+ public:
+ MOCK_METHOD0(Start, int32_t());
+ MOCK_METHOD0(Stop, int32_t());
+ MOCK_METHOD1(RegisterModule, int32_t(Module* module));
+ MOCK_METHOD1(DeRegisterModule, int32_t(const Module* module));
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
diff --git a/chromium/third_party/webrtc/modules/utility/interface/process_thread.h b/chromium/third_party/webrtc/modules/utility/interface/process_thread.h
index cdbb4d39127..4db92a308a8 100644
--- a/chromium/third_party/webrtc/modules/utility/interface/process_thread.h
+++ b/chromium/third_party/webrtc/modules/utility/interface/process_thread.h
@@ -25,7 +25,7 @@ public:
virtual int32_t Start() = 0;
virtual int32_t Stop() = 0;
- virtual int32_t RegisterModule(const Module* module) = 0;
+ virtual int32_t RegisterModule(Module* module) = 0;
virtual int32_t DeRegisterModule(const Module* module) = 0;
protected:
virtual ~ProcessThread();
diff --git a/chromium/third_party/webrtc/modules/utility/source/OWNERS b/chromium/third_party/webrtc/modules/utility/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/utility/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations.cc b/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations.cc
index 18dba52b651..e3b00104761 100644
--- a/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations.cc
@@ -72,7 +72,6 @@ void AudioFrameOperations::SwapStereoChannels(AudioFrame* frame) {
void AudioFrameOperations::Mute(AudioFrame& frame) {
memset(frame.data_, 0, sizeof(int16_t) *
frame.samples_per_channel_ * frame.num_channels_);
- frame.energy_ = 0;
}
int AudioFrameOperations::Scale(float left, float right, AudioFrame& frame) {
diff --git a/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations_unittest.cc b/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations_unittest.cc
index 34c08a89e6b..f4d881cf871 100644
--- a/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations_unittest.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/audio_frame_operations_unittest.cc
@@ -142,17 +142,13 @@ TEST_F(AudioFrameOperationsTest, SwapStereoChannelsFailsOnMono) {
TEST_F(AudioFrameOperationsTest, MuteSucceeds) {
SetFrameData(&frame_, 1000, 1000);
- frame_.energy_ = 1000 * 1000 * frame_.samples_per_channel_ *
- frame_.num_channels_;
AudioFrameOperations::Mute(frame_);
AudioFrame muted_frame;
muted_frame.samples_per_channel_ = 320;
muted_frame.num_channels_ = 2;
SetFrameData(&muted_frame, 0, 0);
- muted_frame.energy_ = 0;
VerifyFramesAreEqual(muted_frame, frame_);
- EXPECT_EQ(muted_frame.energy_, frame_.energy_);
}
// TODO(andrew): should not allow negative scales.
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_player_impl.cc b/chromium/third_party/webrtc/modules/utility/source/file_player_impl.cc
index 9240e64691e..8049245fb06 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_player_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/file_player_impl.cc
@@ -9,7 +9,7 @@
*/
#include "webrtc/modules/utility/source/file_player_impl.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
#include "frame_scaler.h"
@@ -35,8 +35,6 @@ FilePlayer* FilePlayer::CreateFilePlayer(uint32_t instanceID,
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
return new VideoFilePlayerImpl(instanceID, fileFormat);
#else
- WEBRTC_TRACE(kTraceError, kTraceFile, -1,
- "Invalid file format: %d", kFileFormatAviFile);
assert(false);
return NULL;
#endif
@@ -114,10 +112,9 @@ int32_t FilePlayerImpl::Get10msAudioFromFile(
{
if(_codec.plfreq == 0)
{
- WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
- "FilePlayerImpl::Get10msAudioFromFile() playing not started!\
- codecFreq = %d, wantedFreq = %d",
- _codec.plfreq, frequencyInHz);
+ LOG(LS_WARNING) << "Get10msAudioFromFile() playing not started!"
+ << " codec freq = " << _codec.plfreq
+ << ", wanted freq = " << frequencyInHz;
return -1;
}
@@ -175,8 +172,7 @@ int32_t FilePlayerImpl::Get10msAudioFromFile(
if(_resampler.ResetIfNeeded(unresampledAudioFrame.sample_rate_hz_,
frequencyInHz, kResamplerSynchronous))
{
- WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
- "FilePlayerImpl::Get10msAudioFromFile() unexpected codec");
+ LOG(LS_WARNING) << "Get10msAudioFromFile() unexpected codec.";
// New sampling frequency. Update state.
outLen = frequencyInHz / 100;
@@ -214,8 +210,7 @@ int32_t FilePlayerImpl::SetAudioScaling(float scaleFactor)
_scaling = scaleFactor;
return 0;
}
- WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
- "FilePlayerImpl::SetAudioScaling() not allowed scale factor");
+ LOG(LS_WARNING) << "SetAudioScaling() non-allowed scale factor.";
return -1;
}
@@ -255,9 +250,8 @@ int32_t FilePlayerImpl::StartPlayingFile(const char* fileName,
codecInstL16.pacsize = 160;
} else
{
- WEBRTC_TRACE(kTraceError, kTraceVoice, _instanceID,
- "FilePlayerImpl::StartPlayingFile() sample frequency\
- specifed not supported for PCM format.");
+ LOG(LS_ERROR) << "StartPlayingFile() sample frequency not "
+ << "supported for PCM format.";
return -1;
}
@@ -266,12 +260,8 @@ int32_t FilePlayerImpl::StartPlayingFile(const char* fileName,
startPosition,
stopPosition) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to initialize file\
- %s playout.", fileName);
+ LOG(LS_WARNING) << "StartPlayingFile() failed to initialize "
+ << "pcm file " << fileName;
return -1;
}
SetAudioScaling(volumeScaling);
@@ -280,13 +270,8 @@ int32_t FilePlayerImpl::StartPlayingFile(const char* fileName,
if (_fileModule.StartPlayingAudioFile(fileName, notification, loop,
_fileFormat, codecInst) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingPreEncodedFile() failed to\
- initialize pre-encoded file %s playout.",
- fileName);
+ LOG(LS_WARNING) << "StartPlayingFile() failed to initialize "
+ << "pre-encoded file " << fileName;
return -1;
}
} else
@@ -297,12 +282,8 @@ int32_t FilePlayerImpl::StartPlayingFile(const char* fileName,
startPosition,
stopPosition) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to initialize file\
- %s playout.", fileName);
+ LOG(LS_WARNING) << "StartPlayingFile() failed to initialize file "
+ << fileName;
return -1;
}
SetAudioScaling(volumeScaling);
@@ -350,12 +331,8 @@ int32_t FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
codecInstL16.pacsize = 160;
}else
{
- WEBRTC_TRACE(
- kTraceError,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() sample frequency specifed\
- not supported for PCM format.");
+ LOG(LS_ERROR) << "StartPlayingFile() sample frequency not "
+ << "supported for PCM format.";
return -1;
}
if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
@@ -363,12 +340,8 @@ int32_t FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
startPosition,
stopPosition) == -1)
{
- WEBRTC_TRACE(
- kTraceError,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to initialize stream\
- playout.");
+ LOG(LS_ERROR) << "StartPlayingFile() failed to initialize stream "
+ << "playout.";
return -1;
}
@@ -377,12 +350,8 @@ int32_t FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
_fileFormat, codecInst) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to initialize stream\
- playout.");
+ LOG(LS_ERROR) << "StartPlayingFile() failed to initialize stream "
+ << "playout.";
return -1;
}
} else {
@@ -392,9 +361,8 @@ int32_t FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
startPosition,
stopPosition) == -1)
{
- WEBRTC_TRACE(kTraceError, kTraceVoice, _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to initialize\
- stream playout.");
+ LOG(LS_ERROR) << "StartPlayingFile() failed to initialize stream "
+ << "playout.";
return -1;
}
}
@@ -430,23 +398,14 @@ int32_t FilePlayerImpl::SetUpAudioDecoder()
{
if ((_fileModule.codec_info(_codec) == -1))
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() failed to retrieve Codec info\
- of file data.");
+ LOG(LS_WARNING) << "Failed to retrieve codec info of file data.";
return -1;
}
if( STR_CASE_CMP(_codec.plname, "L16") != 0 &&
_audioDecoder.SetDecodeCodec(_codec,AMRFileStorage) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FilePlayerImpl::StartPlayingFile() codec %s not supported",
- _codec.plname);
+ LOG(LS_WARNING) << "SetUpAudioDecoder() codec " << _codec.plname
+ << " not supported.";
return -1;
}
_numberOf10MsPerFrame = _codec.pacsize / (_codec.plfreq / 100);
@@ -458,7 +417,7 @@ int32_t FilePlayerImpl::SetUpAudioDecoder()
VideoFilePlayerImpl::VideoFilePlayerImpl(uint32_t instanceID,
FileFormats fileFormat)
: FilePlayerImpl(instanceID, fileFormat),
- video_decoder_(new VideoCoder(instanceID)),
+ video_decoder_(new VideoCoder()),
video_codec_info_(),
_decodedVideoFrames(0),
_encodedData(*new EncodedVideoData()),
@@ -522,7 +481,7 @@ int32_t VideoFilePlayerImpl::StopPlayingFile()
CriticalSectionScoped lock( _critSec);
_decodedVideoFrames = 0;
- video_decoder_.reset(new VideoCoder(_instanceID));
+ video_decoder_.reset(new VideoCoder());
return FilePlayerImpl::StopPlayingFile();
}
@@ -627,12 +586,7 @@ int32_t VideoFilePlayerImpl::TimeUntilNextVideoFrame()
reinterpret_cast< int8_t*>(_encodedData.payloadData),
encodedBufferLengthInBytes) != 0)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVideo,
- _instanceID,
- "FilePlayerImpl::TimeUntilNextVideoFrame() error reading\
- video data");
+ LOG(LS_WARNING) << "Error reading video data.";
return -1;
}
_encodedData.payloadSize = encodedBufferLengthInBytes;
@@ -685,23 +639,16 @@ int32_t VideoFilePlayerImpl::SetUpVideoDecoder()
{
if (_fileModule.VideoCodecInst(video_codec_info_) != 0)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVideo,
- _instanceID,
- "FilePlayerImpl::SetVideoDecoder() failed to retrieve Codec info of\
- file data.");
+ LOG(LS_WARNING) << "SetVideoDecoder() failed to retrieve codec info of "
+ << "file data.";
return -1;
}
int32_t useNumberOfCores = 1;
if (video_decoder_->SetDecodeCodec(video_codec_info_, useNumberOfCores) !=
0) {
- WEBRTC_TRACE(kTraceWarning,
- kTraceVideo,
- _instanceID,
- "FilePlayerImpl::SetUpVideoDecoder() codec %s not supported",
- video_codec_info_.plName);
+ LOG(LS_WARNING) << "SetUpVideoDecoder() codec "
+ << video_codec_info_.plName << " not supported.";
return -1;
}
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_player_unittests.cc b/chromium/third_party/webrtc/modules/utility/source/file_player_unittests.cc
new file mode 100644
index 00000000000..d430d9f59ad
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/utility/source/file_player_unittests.cc
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Unit tests for FilePlayer.
+
+#include "webrtc/modules/utility/interface/file_player.h"
+
+#include <stdio.h>
+#include <string>
+
+#include "gflags/gflags.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/base/md5digest.h"
+#include "webrtc/base/stringencode.h"
+#include "webrtc/test/testsupport/fileutils.h"
+
+DEFINE_bool(file_player_output, false, "Generate reference files.");
+
+namespace webrtc {
+
+class FilePlayerTest : public ::testing::Test {
+ protected:
+ static const uint32_t kId = 0;
+ static const FileFormats kFileFormat = kFileFormatWavFile;
+ static const int kSampleRateHz = 8000;
+
+ FilePlayerTest()
+ : player_(FilePlayer::CreateFilePlayer(kId, kFileFormat)),
+ output_file_(NULL) {}
+
+ virtual void SetUp() OVERRIDE {
+ if (FLAGS_file_player_output) {
+ std::string output_file =
+ webrtc::test::OutputPath() + "file_player_unittest_out.pcm";
+ output_file_ = fopen(output_file.c_str(), "wb");
+ ASSERT_TRUE(output_file_ != NULL);
+ }
+ }
+
+ virtual void TearDown() OVERRIDE {
+ if (output_file_)
+ fclose(output_file_);
+ }
+
+ ~FilePlayerTest() { FilePlayer::DestroyFilePlayer(player_); }
+
+ void PlayFileAndCheck(const std::string& input_file,
+ const std::string& ref_checksum,
+ int output_length_ms) {
+ const float kScaling = 1;
+ ASSERT_EQ(0,
+ player_->StartPlayingFile(
+ input_file.c_str(), false, 0, kScaling, 0, 0, NULL));
+ rtc::Md5Digest checksum;
+ for (int i = 0; i < output_length_ms / 10; ++i) {
+ int16_t out[10 * kSampleRateHz / 1000] = {0};
+ int num_samples;
+ EXPECT_EQ(0,
+ player_->Get10msAudioFromFile(out, num_samples, kSampleRateHz));
+ checksum.Update(out, num_samples * sizeof(out[0]));
+ if (FLAGS_file_player_output) {
+ ASSERT_EQ(static_cast<size_t>(num_samples),
+ fwrite(out, sizeof(out[0]), num_samples, output_file_));
+ }
+ }
+ char checksum_result[rtc::Md5Digest::kSize];
+ EXPECT_EQ(rtc::Md5Digest::kSize,
+ checksum.Finish(checksum_result, rtc::Md5Digest::kSize));
+ EXPECT_EQ(ref_checksum,
+ rtc::hex_encode(checksum_result, sizeof(checksum_result)));
+ }
+
+ FilePlayer* player_;
+ FILE* output_file_;
+};
+
+TEST_F(FilePlayerTest, PlayWavPcmuFile) {
+ const std::string kFileName =
+ test::ResourcePath("utility/encapsulated_pcmu_8khz", "wav");
+ // The file is longer than this, but keeping the output shorter limits the
+ // runtime for the test.
+ const int kOutputLengthMs = 10000;
+ const std::string kRefChecksum = "c74e7fd432d439b1311e1c16815b3e9a";
+
+ PlayFileAndCheck(kFileName, kRefChecksum, kOutputLengthMs);
+}
+
+TEST_F(FilePlayerTest, PlayWavPcm16File) {
+ const std::string kFileName =
+ test::ResourcePath("utility/encapsulated_pcm16b_8khz", "wav");
+ // The file is longer than this, but keeping the output shorter limits the
+ // runtime for the test.
+ const int kOutputLengthMs = 10000;
+ const std::string kRefChecksum = "e41d7e1dac8aeae9f21e8e03cd7ecd71";
+
+ PlayFileAndCheck(kFileName, kRefChecksum, kOutputLengthMs);
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
index 16faa58d3fc..264b867a259 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.cc
@@ -12,7 +12,7 @@
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/media_file/interface/media_file.h"
#include "webrtc/modules/utility/source/file_recorder_impl.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
#include "critical_section_wrapper.h"
@@ -38,8 +38,6 @@ FileRecorder* FileRecorder::CreateFileRecorder(uint32_t instanceID,
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
return new AviRecorder(instanceID, fileFormat);
#else
- WEBRTC_TRACE(kTraceError, kTraceFile, -1,
- "Invalid file format: %d", kFileFormatAviFile);
assert(false);
return NULL;
#endif
@@ -115,13 +113,8 @@ int32_t FileRecorderImpl::StartRecordingAudioFile(
}
if( retVal != 0)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FileRecorder::StartRecording() failed to initialize file %s for\
- recording.",
- fileName);
+ LOG(LS_WARNING) << "Failed to initialize file " << fileName
+ << " for recording.";
if(IsRecording())
{
@@ -152,12 +145,7 @@ int32_t FileRecorderImpl::StartRecordingAudioFile(
}
if( retVal != 0)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FileRecorder::StartRecording() failed to initialize outStream for\
- recording.");
+ LOG(LS_WARNING) << "Failed to initialize outStream for recording.";
if(IsRecording())
{
@@ -184,12 +172,8 @@ int32_t FileRecorderImpl::RecordAudioToFile(
{
if (codec_info_.plfreq == 0)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FileRecorder::RecordAudioToFile() recording audio is not turned\
- on");
+ LOG(LS_WARNING) << "RecordAudioToFile() recording audio is not "
+ << "turned on.";
return -1;
}
AudioFrame tempAudioFrame;
@@ -250,13 +234,9 @@ int32_t FileRecorderImpl::RecordAudioToFile(
if (_audioEncoder.Encode(*ptrAudioFrame, _audioBuffer,
encodedLenInBytes) == -1)
{
- WEBRTC_TRACE(
- kTraceWarning,
- kTraceVoice,
- _instanceID,
- "FileRecorder::RecordAudioToFile() codec %s not supported or\
- failed to encode stream",
- codec_info_.plname);
+ LOG(LS_WARNING) << "RecordAudioToFile() codec "
+ << codec_info_.plname
+ << " not supported or failed to encode stream.";
return -1;
}
} else {
@@ -309,12 +289,8 @@ int32_t FileRecorderImpl::SetUpAudioEncoder()
{
if(_audioEncoder.SetEncodeCodec(codec_info_,_amrFormat) == -1)
{
- WEBRTC_TRACE(
- kTraceError,
- kTraceVoice,
- _instanceID,
- "FileRecorder::StartRecording() codec %s not supported",
- codec_info_.plname);
+ LOG(LS_ERROR) << "SetUpAudioEncoder() codec "
+ << codec_info_.plname << " not supported.";
return -1;
}
}
@@ -342,31 +318,6 @@ int32_t FileRecorderImpl::WriteEncodedAudioData(
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
-class AudioFrameFileInfo
-{
- public:
- AudioFrameFileInfo(const int8_t* audioData,
- const uint16_t audioSize,
- const uint16_t audioMS,
- const TickTime& playoutTS)
- : _audioData(), _audioSize(audioSize), _audioMS(audioMS),
- _playoutTS(playoutTS)
- {
- if(audioSize > MAX_AUDIO_BUFFER_IN_BYTES)
- {
- assert(false);
- _audioSize = 0;
- return;
- }
- memcpy(_audioData, audioData, audioSize);
- };
- // TODO (hellner): either turn into a struct or provide get/set functions.
- int8_t _audioData[MAX_AUDIO_BUFFER_IN_BYTES];
- uint16_t _audioSize;
- uint16_t _audioMS;
- TickTime _playoutTS;
-};
-
AviRecorder::AviRecorder(uint32_t instanceID, FileFormats fileFormat)
: FileRecorderImpl(instanceID, fileFormat),
_videoOnly(false),
@@ -377,7 +328,7 @@ AviRecorder::AviRecorder(uint32_t instanceID, FileFormats fileFormat)
_writtenAudioMS(0),
_writtenVideoMS(0)
{
- _videoEncoder = new VideoCoder(instanceID);
+ _videoEncoder = new VideoCoder();
_frameScaler = new FrameScaler();
_videoFramesQueue = new VideoFramesQueue();
_thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
@@ -545,49 +496,39 @@ int32_t AviRecorder::ProcessAudio()
{
// Syncronize audio to the current frame to process by throwing away
// audio samples with older timestamp than the video frame.
- uint32_t numberOfAudioElements =
- _audioFramesToWrite.GetSize();
- for (uint32_t i = 0; i < numberOfAudioElements; ++i)
+ size_t numberOfAudioElements =
+ _audioFramesToWrite.size();
+ for (size_t i = 0; i < numberOfAudioElements; ++i)
{
- AudioFrameFileInfo* frameInfo =
- (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
- if(frameInfo)
+ AudioFrameFileInfo* frameInfo = _audioFramesToWrite.front();
+ if(TickTime::TicksToMilliseconds(
+ frameInfo->_playoutTS.Ticks()) <
+ frameToProcess->render_time_ms())
+ {
+ delete frameInfo;
+ _audioFramesToWrite.pop_front();
+ } else
{
- if(TickTime::TicksToMilliseconds(
- frameInfo->_playoutTS.Ticks()) <
- frameToProcess->render_time_ms())
- {
- delete frameInfo;
- _audioFramesToWrite.PopFront();
- } else
- {
- break;
- }
+ break;
}
}
}
}
// Write all audio up to current timestamp.
int32_t error = 0;
- uint32_t numberOfAudioElements = _audioFramesToWrite.GetSize();
- for (uint32_t i = 0; i < numberOfAudioElements; ++i)
+ size_t numberOfAudioElements = _audioFramesToWrite.size();
+ for (size_t i = 0; i < numberOfAudioElements; ++i)
{
- AudioFrameFileInfo* frameInfo =
- (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
- if(frameInfo)
+ AudioFrameFileInfo* frameInfo = _audioFramesToWrite.front();
+ if((TickTime::Now() - frameInfo->_playoutTS).Milliseconds() > 0)
{
- if((TickTime::Now() - frameInfo->_playoutTS).Milliseconds() > 0)
- {
- _moduleFile->IncomingAudioData(frameInfo->_audioData,
- frameInfo->_audioSize);
- _writtenAudioMS += frameInfo->_audioMS;
- delete frameInfo;
- _audioFramesToWrite.PopFront();
- } else {
- break;
- }
+ _moduleFile->IncomingAudioData(frameInfo->_audioData,
+ frameInfo->_audioSize);
+ _writtenAudioMS += frameInfo->_audioMS;
+ delete frameInfo;
+ _audioFramesToWrite.pop_front();
} else {
- _audioFramesToWrite.PopFront();
+ break;
}
}
return error;
@@ -635,8 +576,8 @@ bool AviRecorder::Process()
error = EncodeAndWriteVideoToFile( *frameToProcess);
if( error != 0)
{
- WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
- "AviRecorder::Process() error writing to file.");
+ LOG(LS_ERROR) << "AviRecorder::Process() error writing to "
+ << "file.";
break;
} else {
uint32_t frameLengthMS = 1000 /
@@ -675,8 +616,7 @@ bool AviRecorder::Process()
error = EncodeAndWriteVideoToFile( *frameToProcess);
if(error != 0)
{
- WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
- "AviRecorder::Process() error writing to file.");
+ LOG(LS_ERROR) << "AviRecorder::Process() error writing to file.";
} else {
_writtenVideoMS += frameLengthMS;
}
@@ -727,17 +667,12 @@ int32_t AviRecorder::EncodeAndWriteVideoToFile(I420VideoFrame& videoFrame)
(int8_t*)(_videoEncodedData.payloadData),
_videoEncodedData.payloadSize))
{
- WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
- "Error writing AVI file");
+ LOG(LS_ERROR) << "Error writing AVI file.";
return -1;
}
} else {
- WEBRTC_TRACE(
- kTraceError,
- kTraceVideo,
- _instanceID,
- "FileRecorder::RecordVideoToFile() frame dropped by encoder bitrate\
- likely to low.");
+ LOG(LS_ERROR) << "FileRecorder::RecordVideoToFile() frame dropped by "
+ << "encoder, bitrate likely too low.";
}
return 0;
}
@@ -750,6 +685,8 @@ int32_t AviRecorder::WriteEncodedAudioData(
uint16_t millisecondsOfData,
const TickTime* playoutTS)
{
+ CriticalSectionScoped lock(_critSec);
+
if (!IsRecording())
{
return -1;
@@ -762,7 +699,7 @@ int32_t AviRecorder::WriteEncodedAudioData(
{
return -1;
}
- if (_audioFramesToWrite.GetSize() > kMaxAudioBufferQueueLength)
+ if (_audioFramesToWrite.size() > kMaxAudioBufferQueueLength)
{
StopRecording();
return -1;
@@ -771,15 +708,15 @@ int32_t AviRecorder::WriteEncodedAudioData(
if(playoutTS)
{
- _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer,
- bufferLength,
- millisecondsOfData,
- *playoutTS));
+ _audioFramesToWrite.push_back(new AudioFrameFileInfo(audioBuffer,
+ bufferLength,
+ millisecondsOfData,
+ *playoutTS));
} else {
- _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer,
- bufferLength,
- millisecondsOfData,
- TickTime::Now()));
+ _audioFramesToWrite.push_back(new AudioFrameFileInfo(audioBuffer,
+ bufferLength,
+ millisecondsOfData,
+ TickTime::Now()));
}
_timeEvent.Set();
return 0;
diff --git a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
index 0b7290eddce..53fd26bc25d 100644
--- a/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
+++ b/chromium/third_party/webrtc/modules/utility/source/file_recorder_impl.h
@@ -15,6 +15,8 @@
#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
#define WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
+#include <list>
+
#include "webrtc/common_audio/resampler/include/resampler.h"
#include "webrtc/common_types.h"
#include "webrtc/engine_configurations.h"
@@ -40,6 +42,8 @@ enum { MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32};
enum { MAX_AUDIO_BUFFER_IN_BYTES = MAX_AUDIO_BUFFER_IN_SAMPLES*2};
enum { kMaxAudioBufferQueueLength = 100 };
+class CriticalSectionWrapper;
+
class FileRecorderImpl : public FileRecorder
{
public:
@@ -103,6 +107,31 @@ private:
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+class AudioFrameFileInfo
+{
+ public:
+ AudioFrameFileInfo(const int8_t* audioData,
+ const uint16_t audioSize,
+ const uint16_t audioMS,
+ const TickTime& playoutTS)
+ : _audioData(), _audioSize(audioSize), _audioMS(audioMS),
+ _playoutTS(playoutTS)
+ {
+ if(audioSize > MAX_AUDIO_BUFFER_IN_BYTES)
+ {
+ assert(false);
+ _audioSize = 0;
+ return;
+ }
+ memcpy(_audioData, audioData, audioSize);
+ };
+ // TODO (hellner): either turn into a struct or provide get/set functions.
+ int8_t _audioData[MAX_AUDIO_BUFFER_IN_BYTES];
+ uint16_t _audioSize;
+ uint16_t _audioMS;
+ TickTime _playoutTS;
+};
+
class AviRecorder : public FileRecorderImpl
{
public:
@@ -126,6 +155,7 @@ protected:
uint16_t millisecondsOfData,
const TickTime* playoutTS);
private:
+ typedef std::list<AudioFrameFileInfo*> AudioInfoList;
static bool Run(ThreadObj threadObj);
bool Process();
@@ -141,7 +171,7 @@ private:
VideoCodec _videoCodecInst;
bool _videoOnly;
- ListWrapper _audioFramesToWrite;
+ AudioInfoList _audioFramesToWrite;
bool _firstAudioFrameReceived;
VideoFramesQueue* _videoFramesQueue;
diff --git a/chromium/third_party/webrtc/modules/utility/source/frame_scaler.cc b/chromium/third_party/webrtc/modules/utility/source/frame_scaler.cc
index ed127a6715a..50ccf8adc67 100644
--- a/chromium/third_party/webrtc/modules/utility/source/frame_scaler.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/frame_scaler.cc
@@ -13,7 +13,6 @@
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
#include "webrtc/common_video/libyuv/include/scaler.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
diff --git a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
index 08979d24935..bf7db3bc846 100644
--- a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.cc
@@ -10,7 +10,7 @@
#include "webrtc/modules/interface/module.h"
#include "webrtc/modules/utility/source/process_thread_impl.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+
namespace webrtc {
ProcessThread::~ProcessThread()
@@ -32,14 +32,12 @@ ProcessThreadImpl::ProcessThreadImpl()
_critSectModules(CriticalSectionWrapper::CreateCriticalSection()),
_thread(NULL)
{
- WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s created", __FUNCTION__);
}
ProcessThreadImpl::~ProcessThreadImpl()
{
delete _critSectModules;
delete &_timeEvent;
- WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
}
int32_t ProcessThreadImpl::Start()
@@ -87,25 +85,21 @@ int32_t ProcessThreadImpl::Stop()
return 0;
}
-int32_t ProcessThreadImpl::RegisterModule(const Module* module)
+int32_t ProcessThreadImpl::RegisterModule(Module* module)
{
CriticalSectionScoped lock(_critSectModules);
// Only allow module to be registered once.
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- if(module == item->GetItem())
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ if(module == *iter)
{
return -1;
}
- item = _modules.Next(item);
}
- _modules.PushFront(module);
- WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
- "number of registered modules has increased to %d",
- _modules.GetSize());
+ _modules.push_front(module);
+
// Wake the thread calling ProcessThreadImpl::Process() to update the
// waiting time. The waiting time for the just registered module may be
// shorter than all other registered modules.
@@ -116,19 +110,13 @@ int32_t ProcessThreadImpl::RegisterModule(const Module* module)
int32_t ProcessThreadImpl::DeRegisterModule(const Module* module)
{
CriticalSectionScoped lock(_critSectModules);
-
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- if(module == item->GetItem())
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ if(module == *iter)
{
- int res = _modules.Erase(item);
- WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
- "number of registered modules has decreased to %d",
- _modules.GetSize());
- return res;
+ _modules.erase(iter);
+ return 0;
}
- item = _modules.Next(item);
}
return -1;
}
@@ -145,16 +133,13 @@ bool ProcessThreadImpl::Process()
int32_t minTimeToNext = 100;
{
CriticalSectionScoped lock(_critSectModules);
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- int32_t timeToNext =
- static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ int32_t timeToNext = (*iter)->TimeUntilNextProcess();
if(minTimeToNext > timeToNext)
{
minTimeToNext = timeToNext;
}
- item = _modules.Next(item);
}
}
@@ -172,16 +157,13 @@ bool ProcessThreadImpl::Process()
}
{
CriticalSectionScoped lock(_critSectModules);
- ListItem* item = _modules.First();
- for(uint32_t i = 0; i < _modules.GetSize() && item; i++)
- {
- int32_t timeToNext =
- static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+ for (ModuleList::iterator iter = _modules.begin();
+ iter != _modules.end(); ++iter) {
+ int32_t timeToNext = (*iter)->TimeUntilNextProcess();
if(timeToNext < 1)
{
- static_cast<Module*>(item->GetItem())->Process();
+ (*iter)->Process();
}
- item = _modules.Next(item);
}
}
return true;
diff --git a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
index d1913c47c49..14fbc18a2a5 100644
--- a/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
+++ b/chromium/third_party/webrtc/modules/utility/source/process_thread_impl.h
@@ -11,10 +11,11 @@
#ifndef WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
#define WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
+#include <list>
+
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/typedefs.h"
@@ -28,7 +29,7 @@ public:
virtual int32_t Start();
virtual int32_t Stop();
- virtual int32_t RegisterModule(const Module* module);
+ virtual int32_t RegisterModule(Module* module);
virtual int32_t DeRegisterModule(const Module* module);
protected:
@@ -37,9 +38,10 @@ protected:
bool Process();
private:
+ typedef std::list<Module*> ModuleList;
EventWrapper& _timeEvent;
CriticalSectionWrapper* _critSectModules;
- ListWrapper _modules;
+ ModuleList _modules;
ThreadWrapper* _thread;
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/utility/source/rtp_dump_impl.cc b/chromium/third_party/webrtc/modules/utility/source/rtp_dump_impl.cc
index 39316f47858..547df332f9f 100644
--- a/chromium/third_party/webrtc/modules/utility/source/rtp_dump_impl.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/rtp_dump_impl.cc
@@ -14,7 +14,7 @@
#include <stdio.h>
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#if defined(_WIN32)
#include <Windows.h>
@@ -71,7 +71,6 @@ RtpDumpImpl::RtpDumpImpl()
_file(*FileWrapper::Create()),
_startTime(0)
{
- WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s created", __FUNCTION__);
}
RtpDump::~RtpDump()
@@ -84,7 +83,6 @@ RtpDumpImpl::~RtpDumpImpl()
_file.CloseFile();
delete &_file;
delete _critSect;
- WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
}
int32_t RtpDumpImpl::Start(const char* fileNameUTF8)
@@ -100,8 +98,7 @@ int32_t RtpDumpImpl::Start(const char* fileNameUTF8)
_file.CloseFile();
if (_file.OpenFile(fileNameUTF8, false, false, false) == -1)
{
- WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
- "failed to open the specified file");
+ LOG(LS_ERROR) << "Failed to open file.";
return -1;
}
@@ -113,8 +110,7 @@ int32_t RtpDumpImpl::Start(const char* fileNameUTF8)
sprintf(magic, "#!rtpplay%s \n", RTPFILE_VERSION);
if (_file.WriteText(magic) == -1)
{
- WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
- "error writing to file");
+ LOG(LS_ERROR) << "Error writing to file.";
return -1;
}
@@ -129,8 +125,7 @@ int32_t RtpDumpImpl::Start(const char* fileNameUTF8)
memset(dummyHdr, 0, 16);
if (!_file.Write(dummyHdr, sizeof(dummyHdr)))
{
- WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
- "error writing to file");
+ LOG(LS_ERROR) << "Error writing to file.";
return -1;
}
return 0;
@@ -198,14 +193,12 @@ int32_t RtpDumpImpl::DumpPacket(const uint8_t* packet, uint16_t packetLength)
if (!_file.Write(&hdr, sizeof(hdr)))
{
- WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
- "error writing to file");
+ LOG(LS_ERROR) << "Error writing to file.";
return -1;
}
if (!_file.Write(packet, packetLength))
{
- WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
- "error writing to file");
+ LOG(LS_ERROR) << "Error writing to file.";
return -1;
}
diff --git a/chromium/third_party/webrtc/modules/utility/source/video_coder.cc b/chromium/third_party/webrtc/modules/utility/source/video_coder.cc
index 267ed810489..5096acecd25 100644
--- a/chromium/third_party/webrtc/modules/utility/source/video_coder.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/video_coder.cc
@@ -13,10 +13,7 @@
#include "webrtc/modules/utility/source/video_coder.h"
namespace webrtc {
-VideoCoder::VideoCoder(uint32_t instanceID)
- : _vcm(VideoCodingModule::Create(instanceID)),
- _decodedVideo(0)
-{
+VideoCoder::VideoCoder() : _vcm(VideoCodingModule::Create()), _decodedVideo(0) {
_vcm->InitializeSender();
_vcm->InitializeReceiver();
diff --git a/chromium/third_party/webrtc/modules/utility/source/video_coder.h b/chromium/third_party/webrtc/modules/utility/source/video_coder.h
index cb8bfa5a182..8e4344be16b 100644
--- a/chromium/third_party/webrtc/modules/utility/source/video_coder.h
+++ b/chromium/third_party/webrtc/modules/utility/source/video_coder.h
@@ -20,7 +20,7 @@ namespace webrtc {
class VideoCoder : public VCMPacketizationCallback, public VCMReceiveCallback
{
public:
- VideoCoder(uint32_t instanceID);
+ VideoCoder();
~VideoCoder();
int32_t SetEncodeCodec(VideoCodec& videoCodecInst,
diff --git a/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.cc b/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.cc
index d3d37bec2d0..9ade8b51a49 100644
--- a/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.cc
+++ b/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.cc
@@ -16,80 +16,48 @@
#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/modules/interface/module_common_types.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
VideoFramesQueue::VideoFramesQueue()
- : _incomingFrames(),
- _renderDelayMs(10)
+ : _renderDelayMs(10)
{
}
VideoFramesQueue::~VideoFramesQueue() {
- while (!_incomingFrames.Empty()) {
- ListItem* item = _incomingFrames.First();
- if (item) {
- I420VideoFrame* ptrFrame = static_cast<I420VideoFrame*>(item->GetItem());
- assert(ptrFrame != NULL);
- delete ptrFrame;
- }
- _incomingFrames.Erase(item);
+ for (FrameList::iterator iter = _incomingFrames.begin();
+ iter != _incomingFrames.end(); ++iter) {
+ delete *iter;
}
- while (!_emptyFrames.Empty()) {
- ListItem* item = _emptyFrames.First();
- if (item) {
- I420VideoFrame* ptrFrame =
- static_cast<I420VideoFrame*>(item->GetItem());
- assert(ptrFrame != NULL);
- delete ptrFrame;
- }
- _emptyFrames.Erase(item);
+ for (FrameList::iterator iter = _emptyFrames.begin();
+ iter != _emptyFrames.end(); ++iter) {
+ delete *iter;
}
}
int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) {
if (newFrame.native_handle() != NULL) {
- _incomingFrames.PushBack(new TextureVideoFrame(
- static_cast<NativeHandle*>(newFrame.native_handle()),
- newFrame.width(),
- newFrame.height(),
- newFrame.timestamp(),
- newFrame.render_time_ms()));
+ _incomingFrames.push_back(newFrame.CloneFrame());
return 0;
}
I420VideoFrame* ptrFrameToAdd = NULL;
// Try to re-use a VideoFrame. Only allocate new memory if it is necessary.
- if (!_emptyFrames.Empty()) {
- ListItem* item = _emptyFrames.First();
- if (item) {
- ptrFrameToAdd = static_cast<I420VideoFrame*>(item->GetItem());
- _emptyFrames.Erase(item);
- }
+ if (!_emptyFrames.empty()) {
+ ptrFrameToAdd = _emptyFrames.front();
+ _emptyFrames.pop_front();
}
if (!ptrFrameToAdd) {
- if (_emptyFrames.GetSize() + _incomingFrames.GetSize() >
+ if (_emptyFrames.size() + _incomingFrames.size() >
KMaxNumberOfFrames) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
- "%s: too many frames, limit: %d", __FUNCTION__,
- KMaxNumberOfFrames);
+ LOG(LS_WARNING) << "Too many frames, limit: " << KMaxNumberOfFrames;
return -1;
}
-
- WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
- "%s: allocating buffer %d", __FUNCTION__,
- _emptyFrames.GetSize() + _incomingFrames.GetSize());
-
ptrFrameToAdd = new I420VideoFrame();
- if (!ptrFrameToAdd) {
- WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
- "%s: could not create new frame for", __FUNCTION__);
- return -1;
- }
}
ptrFrameToAdd->CopyFrame(newFrame);
- _incomingFrames.PushBack(ptrFrameToAdd);
+ _incomingFrames.push_back(ptrFrameToAdd);
return 0;
}
@@ -99,20 +67,18 @@ int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) {
// Recycle all frames that are older than the most recent frame.
I420VideoFrame* VideoFramesQueue::FrameToRecord() {
I420VideoFrame* ptrRenderFrame = NULL;
- ListItem* item = _incomingFrames.First();
- while(item) {
- I420VideoFrame* ptrOldestFrameInList =
- static_cast<I420VideoFrame*>(item->GetItem());
+ for (FrameList::iterator iter = _incomingFrames.begin();
+ iter != _incomingFrames.end(); ++iter) {
+ I420VideoFrame* ptrOldestFrameInList = *iter;
if (ptrOldestFrameInList->render_time_ms() <=
TickTime::MillisecondTimestamp() + _renderDelayMs) {
+ // List is traversed beginning to end. If ptrRenderFrame is not
+ // NULL it must be the first, and thus oldest, VideoFrame in the
+ // queue. It can be recycled.
if (ptrRenderFrame) {
- // List is traversed beginning to end. If ptrRenderFrame is not
- // NULL it must be the first, and thus oldest, VideoFrame in the
- // queue. It can be recycled.
ReturnFrame(ptrRenderFrame);
- _incomingFrames.PopFront();
+ _incomingFrames.pop_front();
}
- item = _incomingFrames.Next(item);
ptrRenderFrame = ptrOldestFrameInList;
} else {
// All VideoFrames following this one will be even newer. No match
@@ -131,7 +97,7 @@ int32_t VideoFramesQueue::ReturnFrame(I420VideoFrame* ptrOldFrame) {
ptrOldFrame->set_height(0);
ptrOldFrame->set_render_time_ms(0);
ptrOldFrame->ResetSize();
- _emptyFrames.PushBack(ptrOldFrame);
+ _emptyFrames.push_back(ptrOldFrame);
} else {
delete ptrOldFrame;
}
diff --git a/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.h b/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.h
index 4316bf7c047..afc64d9b71e 100644
--- a/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.h
+++ b/chromium/third_party/webrtc/modules/utility/source/video_frames_queue.h
@@ -13,9 +13,10 @@
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+#include <list>
+
#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/engine_configurations.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@@ -42,6 +43,7 @@ class VideoFramesQueue {
int32_t ReturnFrame(I420VideoFrame* ptrOldFrame);
private:
+ typedef std::list<I420VideoFrame*> FrameList;
// Don't allow the buffer to expand beyond KMaxNumberOfFrames VideoFrames.
// 300 frames correspond to 10 seconds worth of frames at 30 fps.
enum {KMaxNumberOfFrames = 300};
@@ -49,9 +51,9 @@ class VideoFramesQueue {
// List of VideoFrame pointers. The list is sorted in the order of when the
// VideoFrame was inserted into the list. The first VideoFrame in the list
// was inserted first.
- ListWrapper _incomingFrames;
+ FrameList _incomingFrames;
// A list of frames that are free to be re-used.
- ListWrapper _emptyFrames;
+ FrameList _emptyFrames;
// Estimated render delay.
uint32_t _renderDelayMs;
diff --git a/chromium/third_party/webrtc/modules/video_capture/OWNERS b/chromium/third_party/webrtc/modules/video_capture/OWNERS
index 3b02126feaa..fdc2a3ff702 100644
--- a/chromium/third_party/webrtc/modules/video_capture/OWNERS
+++ b/chromium/third_party/webrtc/modules/video_capture/OWNERS
@@ -1,5 +1,13 @@
fischman@webrtc.org
+glaznev@webrtc.org
mallinath@webrtc.org
mflodman@webrtc.org
perkj@webrtc.org
wu@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.cc b/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.cc
index 10c277eeb7a..4a80fe27229 100644
--- a/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.cc
@@ -21,36 +21,40 @@
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/trace.h"
-namespace webrtc
-{
+namespace webrtc {
-namespace videocapturemodule
-{
+namespace videocapturemodule {
-static std::string ResolutionsToString(
- const std::vector<std::pair<int, int> >& pairs) {
+// Helper for storing lists of pairs of ints. Used e.g. for resolutions & FPS
+// ranges.
+typedef std::pair<int, int> IntPair;
+typedef std::vector<IntPair> IntPairs;
+
+static std::string IntPairsToString(const IntPairs& pairs, char separator) {
std::stringstream stream;
for (size_t i = 0; i < pairs.size(); ++i) {
if (i > 0)
stream << ", ";
- stream << "(" << pairs[i].first << "x" << pairs[i].second << ")";
+ stream << "(" << pairs[i].first << separator << pairs[i].second << ")";
}
return stream.str();
}
struct AndroidCameraInfo {
std::string name;
- int min_mfps, max_mfps; // FPS*1000.
bool front_facing;
int orientation;
- std::vector<std::pair<int, int> > resolutions; // Pairs are: (width,height).
+ IntPairs resolutions; // Pairs are: (width,height).
+ // Pairs are (min,max) in units of FPS*1000 ("milli-frame-per-second").
+ IntPairs mfpsRanges;
std::string ToString() {
std::stringstream stream;
- stream << "Name: [" << name << "], mfps: [" << min_mfps << ":" << max_mfps
+ stream << "Name: [" << name << "], MFPS ranges: ["
+ << IntPairsToString(mfpsRanges, ':')
<< "], front_facing: " << front_facing
<< ", orientation: " << orientation << ", resolutions: ["
- << ResolutionsToString(resolutions) << "]";
+ << IntPairsToString(resolutions, 'x') << "]";
return stream.str();
}
};
@@ -120,8 +124,6 @@ void DeviceInfoAndroid::Initialize(JNIEnv* jni) {
const Json::Value& camera = cameras[i];
AndroidCameraInfo info;
info.name = camera["name"].asString();
- info.min_mfps = camera["min_mfps"].asInt();
- info.max_mfps = camera["max_mfps"].asInt();
info.front_facing = camera["front_facing"].asBool();
info.orientation = camera["orientation"].asInt();
Json::Value sizes = camera["sizes"];
@@ -130,10 +132,23 @@ void DeviceInfoAndroid::Initialize(JNIEnv* jni) {
info.resolutions.push_back(std::make_pair(
size["width"].asInt(), size["height"].asInt()));
}
+ Json::Value mfpsRanges = camera["mfpsRanges"];
+ for (Json::ArrayIndex j = 0; j < mfpsRanges.size(); ++j) {
+ const Json::Value& mfpsRange = mfpsRanges[j];
+ info.mfpsRanges.push_back(std::make_pair(mfpsRange["min_mfps"].asInt(),
+ mfpsRange["max_mfps"].asInt()));
+ }
g_camera_info->push_back(info);
}
}
+void DeviceInfoAndroid::DeInitialize() {
+ if (g_camera_info) {
+ delete g_camera_info;
+ g_camera_info = NULL;
+ }
+}
+
VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
const int32_t id) {
return new videocapturemodule::DeviceInfoAndroid(id);
@@ -187,14 +202,17 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
return -1;
for (size_t i = 0; i < info->resolutions.size(); ++i) {
- const std::pair<int, int>& size = info->resolutions[i];
- VideoCaptureCapability cap;
- cap.width = size.first;
- cap.height = size.second;
- cap.maxFPS = info->max_mfps / 1000;
- cap.expectedCaptureDelay = kExpectedCaptureDelay;
- cap.rawType = kVideoNV21;
- _captureCapabilities.push_back(cap);
+ for (size_t j = 0; j < info->mfpsRanges.size(); ++j) {
+ const IntPair& size = info->resolutions[i];
+ const IntPair& mfpsRange = info->mfpsRanges[j];
+ VideoCaptureCapability cap;
+ cap.width = size.first;
+ cap.height = size.second;
+ cap.maxFPS = mfpsRange.second / 1000;
+ cap.expectedCaptureDelay = kExpectedCaptureDelay;
+ cap.rawType = kVideoNV21;
+ _captureCapabilities.push_back(cap);
+ }
}
return _captureCapabilities.size();
}
@@ -210,13 +228,22 @@ int32_t DeviceInfoAndroid::GetOrientation(
return 0;
}
-void DeviceInfoAndroid::GetFpsRange(const char* deviceUniqueIdUTF8,
- int* min_mfps, int* max_mfps) {
+void DeviceInfoAndroid::GetMFpsRange(const char* deviceUniqueIdUTF8,
+ int max_fps_to_match,
+ int* min_mfps, int* max_mfps) {
const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
if (info == NULL)
return;
- *min_mfps = info->min_mfps;
- *max_mfps = info->max_mfps;
+ // Rely on CameraParameters.getSupportedPreviewFpsRange() to sort its return
+ // value (per its documentation) and return the first (most flexible) range
+ // whose high end is at least as high as that requested.
+ for (size_t i = 0; i < info->mfpsRanges.size(); ++i) {
+ if (info->mfpsRanges[i].second / 1000 >= max_fps_to_match) {
+ *min_mfps = info->mfpsRanges[i].first;
+ *max_mfps = info->mfpsRanges[i].second;
+ return;
+ }
+ }
}
} // namespace videocapturemodule
diff --git a/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.h b/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.h
index d277113e5e1..542cbba0881 100644
--- a/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.h
+++ b/chromium/third_party/webrtc/modules/video_capture/android/device_info_android.h
@@ -24,6 +24,7 @@ namespace videocapturemodule
class DeviceInfoAndroid : public DeviceInfoImpl {
public:
static void Initialize(JNIEnv* env);
+ static void DeInitialize();
DeviceInfoAndroid(int32_t id);
virtual ~DeviceInfoAndroid();
@@ -53,10 +54,12 @@ class DeviceInfoAndroid : public DeviceInfoImpl {
virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
VideoCaptureRotation& orientation);
- // Populate |min_mfps| and |max_mfps| with the supported range of the device.
- void GetFpsRange(const char* deviceUniqueIdUTF8,
- int* min_mfps,
- int* max_mfps);
+ // Populate |min_mfps| and |max_mfps| with the closest supported range of the
+ // device to |max_fps_to_match|.
+ void GetMFpsRange(const char* deviceUniqueIdUTF8,
+ int max_fps_to_match,
+ int* min_mfps,
+ int* max_mfps);
private:
enum { kExpectedCaptureDelay = 190};
diff --git a/chromium/third_party/webrtc/modules/video_capture/android/video_capture_android.cc b/chromium/third_party/webrtc/modules/video_capture/android/video_capture_android.cc
index 2b6d60644f5..c9aa52ce7f1 100644
--- a/chromium/third_party/webrtc/modules/video_capture/android/video_capture_android.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/android/video_capture_android.cc
@@ -10,6 +10,7 @@
#include "webrtc/modules/video_capture/android/video_capture_android.h"
+#include "webrtc/base/common.h"
#include "webrtc/modules/utility/interface/helpers_android.h"
#include "webrtc/modules/video_capture/android/device_info_android.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -19,9 +20,16 @@
static JavaVM* g_jvm = NULL;
static jclass g_java_capturer_class = NULL; // VideoCaptureAndroid.class.
+static jobject g_context = NULL; // Owned android.content.Context.
namespace webrtc {
+// Called by Java to get the global application context.
+jobject JNICALL GetContext(JNIEnv* env, jclass) {
+ assert(g_context);
+ return g_context;
+}
+
// Called by Java when the camera has a new frame to deliver.
void JNICALL ProvideCameraFrame(
JNIEnv* env,
@@ -38,25 +46,67 @@ void JNICALL ProvideCameraFrame(
env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT);
}
-int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
- g_jvm = javaVM;
- AttachThreadScoped ats(g_jvm);
-
- videocapturemodule::DeviceInfoAndroid::Initialize(ats.env());
-
- jclass j_capture_class =
- ats.env()->FindClass("org/webrtc/videoengine/VideoCaptureAndroid");
- assert(j_capture_class);
- g_java_capturer_class =
- reinterpret_cast<jclass>(ats.env()->NewGlobalRef(j_capture_class));
- assert(g_java_capturer_class);
+// Called by Java when the device orientation has changed.
+void JNICALL OnOrientationChanged(
+ JNIEnv* env, jobject, jlong context, jint degrees) {
+ webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
+ reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
+ context);
+ degrees = (360 + degrees) % 360;
+ assert(degrees >= 0 && degrees < 360);
+ VideoCaptureRotation rotation =
+ (degrees <= 45 || degrees > 315) ? kCameraRotate0 :
+ (degrees > 45 && degrees <= 135) ? kCameraRotate90 :
+ (degrees > 135 && degrees <= 225) ? kCameraRotate180 :
+ (degrees > 225 && degrees <= 315) ? kCameraRotate270 :
+ kCameraRotate0; // Impossible.
+ int32_t status =
+ captureModule->VideoCaptureImpl::SetCaptureRotation(rotation);
+ RTC_UNUSED(status);
+ assert(status == 0);
+}
- JNINativeMethod native_method = {
- "ProvideCameraFrame", "([BIJ)V",
- reinterpret_cast<void*>(&ProvideCameraFrame)
- };
- if (ats.env()->RegisterNatives(g_java_capturer_class, &native_method, 1) != 0)
- assert(false);
+int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context) {
+ if (javaVM) {
+ assert(!g_jvm);
+ g_jvm = javaVM;
+ AttachThreadScoped ats(g_jvm);
+ g_context = ats.env()->NewGlobalRef(context);
+
+ videocapturemodule::DeviceInfoAndroid::Initialize(ats.env());
+
+ jclass j_capture_class =
+ ats.env()->FindClass("org/webrtc/videoengine/VideoCaptureAndroid");
+ assert(j_capture_class);
+ g_java_capturer_class =
+ reinterpret_cast<jclass>(ats.env()->NewGlobalRef(j_capture_class));
+ assert(g_java_capturer_class);
+
+ JNINativeMethod native_methods[] = {
+ {"GetContext",
+ "()Landroid/content/Context;",
+ reinterpret_cast<void*>(&GetContext)},
+ {"OnOrientationChanged",
+ "(JI)V",
+ reinterpret_cast<void*>(&OnOrientationChanged)},
+ {"ProvideCameraFrame",
+ "([BIJ)V",
+ reinterpret_cast<void*>(&ProvideCameraFrame)}};
+ if (ats.env()->RegisterNatives(g_java_capturer_class,
+ native_methods, 3) != 0)
+ assert(false);
+ } else {
+ if (g_jvm) {
+ AttachThreadScoped ats(g_jvm);
+ ats.env()->UnregisterNatives(g_java_capturer_class);
+ ats.env()->DeleteGlobalRef(g_java_capturer_class);
+ g_java_capturer_class = NULL;
+ ats.env()->DeleteGlobalRef(g_context);
+ g_context = NULL;
+ videocapturemodule::DeviceInfoAndroid::DeInitialize();
+ g_jvm = NULL;
+ }
+ }
return 0;
}
@@ -143,7 +193,8 @@ int32_t VideoCaptureAndroid::StartCapture(
assert(j_start);
int min_mfps = 0;
int max_mfps = 0;
- _deviceInfo.GetFpsRange(_deviceUniqueId, &min_mfps, &max_mfps);
+ _deviceInfo.GetMFpsRange(_deviceUniqueId, _captureCapability.maxFPS,
+ &min_mfps, &max_mfps);
bool started = env->CallBooleanMethod(_jCapturer, j_start,
_captureCapability.width,
_captureCapability.height,
@@ -184,8 +235,9 @@ int32_t VideoCaptureAndroid::CaptureSettings(
int32_t VideoCaptureAndroid::SetCaptureRotation(
VideoCaptureRotation rotation) {
CriticalSectionScoped cs(&_apiCs);
- if (VideoCaptureImpl::SetCaptureRotation(rotation) != 0)
- return 0;
+ int32_t status = VideoCaptureImpl::SetCaptureRotation(rotation);
+ if (status != 0)
+ return status;
AttachThreadScoped ats(g_jvm);
JNIEnv* env = ats.env();
diff --git a/chromium/third_party/webrtc/modules/video_capture/device_info_impl.cc b/chromium/third_party/webrtc/modules/video_capture/device_info_impl.cc
index 2d2bc7fb547..7db6103fecb 100644
--- a/chromium/third_party/webrtc/modules/video_capture/device_info_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/device_info_impl.cc
@@ -8,11 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#include <assert.h>
#include <stdlib.h>
#include "webrtc/modules/video_capture/device_info_impl.h"
#include "webrtc/modules/video_capture/video_capture_config.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#ifndef abs
#define abs(a) (a>=0?a:-a)
@@ -75,13 +76,8 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
const uint32_t deviceCapabilityNumber,
VideoCaptureCapability& capability)
{
+ assert(deviceUniqueIdUTF8 != NULL);
- if (!deviceUniqueIdUTF8)
- {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "deviceUniqueIdUTF8 parameter not set in call to GetCapability");
- return -1;
- }
ReadLockScoped cs(_apiLock);
if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
@@ -111,9 +107,9 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
// Make sure the number is valid
if (deviceCapabilityNumber >= (unsigned int) _captureCapabilities.size())
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "deviceCapabilityNumber %d is invalid in call to GetCapability",
- deviceCapabilityNumber);
+ LOG(LS_ERROR) << "Invalid deviceCapabilityNumber "
+ << deviceCapabilityNumber << ">= number of capabilities ("
+ << _captureCapabilities.size() << ").";
return -1;
}
@@ -266,9 +262,9 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
}// else height not good
}//end for
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
- "Best camera format: Width %d, Height %d, Frame rate %d, Color format %d",
- bestWidth, bestHeight, bestFrameRate, bestRawType);
+ LOG(LS_VERBOSE) << "Best camera format: " << bestWidth << "x" << bestHeight
+ << "@" << bestFrameRate
+ << "fps, color format: " << bestRawType;
// Copy the capability
if (bestformatIndex < 0)
@@ -343,11 +339,10 @@ int32_t DeviceInfoImpl::GetExpectedCaptureDelay(
}
if (bestDelay > kMaxCaptureDelay)
{
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
- "Expected capture delay too high. %dms, will use %d", bestDelay,
- kMaxCaptureDelay);
+ LOG(LS_WARNING) << "Expected capture delay (" << bestDelay
+ << " ms) too high, using " << kMaxCaptureDelay
+ << " ms.";
bestDelay = kMaxCaptureDelay;
-
}
return bestDelay;
diff --git a/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.cc b/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.cc
new file mode 100644
index 00000000000..65c9a8dbe7a
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.cc
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Platform-specific initialization bits, if any, go here.
+
+#if !defined(ANDROID) || !defined(WEBRTC_CHROMIUM_BUILD)
+
+namespace webrtc {
+namespace videocapturemodule {
+void EnsureInitialized() {}
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#else // !defined(ANDROID) || !defined(WEBRTC_CHROMIUM_BUILD)
+
+#include <assert.h>
+#include <pthread.h>
+
+#include "base/android/jni_android.h"
+
+// Handy alternative to assert() which suppresses unused-variable warnings when
+// assert() is a no-op (i.e. in Release builds).
+#ifdef NDEBUG
+#define ASSERT(x) if (false && (x)); else
+#else
+#define ASSERT(x) assert(x)
+#endif
+
+namespace webrtc {
+
+// Declared in webrtc/modules/video_capture/include/video_capture.h.
+int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject g_context);
+
+namespace videocapturemodule {
+
+static pthread_once_t g_initialize_once = PTHREAD_ONCE_INIT;
+
+void EnsureInitializedOnce() {
+ JNIEnv* jni = ::base::android::AttachCurrentThread();
+ jobject context = ::base::android::GetApplicationContext();
+ JavaVM* jvm = NULL;
+ int status = jni->GetJavaVM(&jvm);
+ ASSERT(status == 0);
+ status = webrtc::SetCaptureAndroidVM(jvm, context) == 0;
+ ASSERT(status);
+}
+
+void EnsureInitialized() {
+ int ret = pthread_once(&g_initialize_once, &EnsureInitializedOnce);
+ ASSERT(ret == 0);
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // ANDROID & WEBRTC_CHROMIUM_BUILD
diff --git a/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.h b/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.h
new file mode 100644
index 00000000000..429879537cd
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_capture/ensure_initialized.h
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+namespace webrtc {
+namespace videocapturemodule {
+
+// Ensure any necessary initialization of webrtc::videocapturemodule has
+// completed.
+void EnsureInitialized();
+
+} // namespace videocapturemodule.
+} // namespace webrtc.
diff --git a/chromium/third_party/webrtc/modules/video_capture/include/mock/mock_video_capture.h b/chromium/third_party/webrtc/modules/video_capture/include/mock/mock_video_capture.h
new file mode 100644
index 00000000000..8ad74a23886
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_capture/include/mock/mock_video_capture.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
+
+#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockVideoCaptureModule : public VideoCaptureModule {
+ public:
+ // from Module
+ MOCK_METHOD0(TimeUntilNextProcess, int32_t());
+ MOCK_METHOD0(Process, int32_t());
+
+ // from RefCountedModule
+ MOCK_METHOD0(AddRef, int32_t());
+ MOCK_METHOD0(Release, int32_t());
+
+ // from VideoCaptureModule
+ MOCK_METHOD1(RegisterCaptureDataCallback,
+ void(VideoCaptureDataCallback& dataCallback));
+ MOCK_METHOD0(DeRegisterCaptureDataCallback, void());
+ MOCK_METHOD1(RegisterCaptureCallback, void(VideoCaptureFeedBack& callBack));
+ MOCK_METHOD0(DeRegisterCaptureCallback, void());
+ MOCK_METHOD1(StartCapture, int32_t(const VideoCaptureCapability& capability));
+ MOCK_METHOD0(StopCapture, int32_t());
+ MOCK_CONST_METHOD0(CurrentDeviceName, const char*());
+ MOCK_METHOD0(CaptureStarted, bool());
+ MOCK_METHOD1(CaptureSettings, int32_t(VideoCaptureCapability& settings));
+ MOCK_METHOD1(SetCaptureDelay, void(int32_t delayMS));
+ MOCK_METHOD0(CaptureDelay, int32_t());
+ MOCK_METHOD1(SetCaptureRotation, int32_t(VideoCaptureRotation rotation));
+ MOCK_METHOD1(GetEncodeInterface,
+ VideoCaptureEncodeInterface*(const VideoCodec& codec));
+ MOCK_METHOD1(EnableFrameRateCallback, void(const bool enable));
+ MOCK_METHOD1(EnableNoPictureAlarm, void(const bool enable));
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
diff --git a/chromium/third_party/webrtc/modules/video_capture/include/video_capture.h b/chromium/third_party/webrtc/modules/video_capture/include/video_capture.h
index 6b6247c2777..7398af60448 100644
--- a/chromium/third_party/webrtc/modules/video_capture/include/video_capture.h
+++ b/chromium/third_party/webrtc/modules/video_capture/include/video_capture.h
@@ -20,8 +20,8 @@
namespace webrtc {
-#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
-int32_t SetCaptureAndroidVM(JavaVM* javaVM);
+#if defined(ANDROID)
+int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context);
#endif
class VideoCaptureModule: public RefCountedModule {
@@ -105,18 +105,17 @@ class VideoCaptureModule: public RefCountedModule {
};
// Register capture data callback
- virtual int32_t RegisterCaptureDataCallback(
+ virtual void RegisterCaptureDataCallback(
VideoCaptureDataCallback& dataCallback) = 0;
// Remove capture data callback
- virtual int32_t DeRegisterCaptureDataCallback() = 0;
+ virtual void DeRegisterCaptureDataCallback() = 0;
// Register capture callback.
- virtual int32_t RegisterCaptureCallback(
- VideoCaptureFeedBack& callBack) = 0;
+ virtual void RegisterCaptureCallback(VideoCaptureFeedBack& callBack) = 0;
// Remove capture callback.
- virtual int32_t DeRegisterCaptureCallback() = 0;
+ virtual void DeRegisterCaptureCallback() = 0;
// Start capture device
virtual int32_t StartCapture(
@@ -133,7 +132,7 @@ class VideoCaptureModule: public RefCountedModule {
// Gets the current configuration.
virtual int32_t CaptureSettings(VideoCaptureCapability& settings) = 0;
- virtual int32_t SetCaptureDelay(int32_t delayMS) = 0;
+ virtual void SetCaptureDelay(int32_t delayMS) = 0;
// Returns the current CaptureDelay. Only valid when the camera is running.
virtual int32_t CaptureDelay() = 0;
@@ -149,8 +148,8 @@ class VideoCaptureModule: public RefCountedModule {
virtual VideoCaptureEncodeInterface* GetEncodeInterface(
const VideoCodec& codec) = 0;
- virtual int32_t EnableFrameRateCallback(const bool enable) = 0;
- virtual int32_t EnableNoPictureAlarm(const bool enable) = 0;
+ virtual void EnableFrameRateCallback(const bool enable) = 0;
+ virtual void EnableNoPictureAlarm(const bool enable) = 0;
protected:
virtual ~VideoCaptureModule() {};
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios.mm b/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios.mm
index c51a53a3372..dea9fc34a41 100644
--- a/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios.mm
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_capture/ios/device_info_ios.h"
#include "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
#include "webrtc/modules/video_capture/video_capture_impl.h"
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios_objc.mm b/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios_objc.mm
index 2d11a2043fb..d06d3361f45 100644
--- a/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios_objc.mm
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/device_info_ios_objc.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#import <AVFoundation/AVFoundation.h>
#import "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.h b/chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h
index 8e50facba19..7d4147b430d 100644
--- a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.h
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h
@@ -11,18 +11,17 @@
#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_OBJC_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_IOS_VIDEO_CAPTURE_IOS_OBJC_H_
+#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#include "webrtc/modules/video_capture/ios/video_capture_ios.h"
-@interface VideoCaptureIosObjC
- : UIViewController<AVCaptureVideoDataOutputSampleBufferDelegate> {
- @private
- webrtc::videocapturemodule::VideoCaptureIos* _owner;
- webrtc::VideoCaptureCapability _capability;
- AVCaptureSession* _captureSession;
- int _captureId;
-}
+// The following class listens to a notification with name:
+// 'StatusBarOrientationDidChange'.
+// This notification must be posted in order for the capturer to reflect the
+// orientation change in video w.r.t. the application orientation.
+@interface RTCVideoCaptureIosObjC
+ : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate>
@property webrtc::VideoCaptureRotation frameRotation;
@@ -31,7 +30,7 @@
// default init methods have been overridden to return nil.
- (id)initWithOwner:(webrtc::videocapturemodule::VideoCaptureIos*)owner
captureId:(int)captureId;
-- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniequeId;
+- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId;
- (BOOL)startCaptureWithCapability:
(const webrtc::VideoCaptureCapability&)capability;
- (BOOL)stopCapture;
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.mm b/chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.mm
index 5b8d69786e8..641ca2416b2 100644
--- a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios_objc.mm
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.mm
@@ -8,20 +8,33 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import <UIKit/UIKit.h>
+
#import "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
-#import "webrtc/modules/video_capture/ios/video_capture_ios_objc.h"
+#import "webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h"
#include "webrtc/system_wrappers/interface/trace.h"
using namespace webrtc;
using namespace webrtc::videocapturemodule;
-@interface VideoCaptureIosObjC (hidden)
+@interface RTCVideoCaptureIosObjC (hidden)
- (int)changeCaptureInputWithName:(NSString*)captureDeviceName;
-
@end
-@implementation VideoCaptureIosObjC
+@implementation RTCVideoCaptureIosObjC {
+ webrtc::videocapturemodule::VideoCaptureIos* _owner;
+ webrtc::VideoCaptureCapability _capability;
+ AVCaptureSession* _captureSession;
+ int _captureId;
+ AVCaptureConnection* _connection;
+ BOOL _captureChanging; // Guarded by _captureChangingCondition.
+ NSCondition* _captureChangingCondition;
+}
@synthesize frameRotation = _framRotation;
@@ -30,16 +43,16 @@ using namespace webrtc::videocapturemodule;
_owner = owner;
_captureId = captureId;
_captureSession = [[AVCaptureSession alloc] init];
+ _captureChanging = NO;
+ _captureChangingCondition = [[NSCondition alloc] init];
- if (!_captureSession) {
+ if (!_captureSession || !_captureChangingCondition) {
return nil;
}
// create and configure a new output (using callbacks)
AVCaptureVideoDataOutput* captureOutput =
[[AVCaptureVideoDataOutput alloc] init];
- [captureOutput setSampleBufferDelegate:self
- queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* val = [NSNumber
@@ -66,12 +79,36 @@ using namespace webrtc::videocapturemodule;
selector:@selector(onVideoError:)
name:AVCaptureSessionRuntimeErrorNotification
object:_captureSession];
+ [notify addObserver:self
+ selector:@selector(statusBarOrientationDidChange:)
+ name:@"StatusBarOrientationDidChange"
+ object:nil];
}
return self;
}
+- (void)directOutputToSelf {
+ [[self currentOutput]
+ setSampleBufferDelegate:self
+ queue:dispatch_get_global_queue(
+ DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
+}
+
+- (void)directOutputToNil {
+ [[self currentOutput] setSampleBufferDelegate:nil queue:NULL];
+}
+
+- (void)statusBarOrientationDidChange:(NSNotification*)notification {
+ [self setRelativeVideoOrientation];
+}
+
+- (void)dealloc {
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+}
+
- (BOOL)setCaptureDeviceByUniqueId:(NSString*)uniqueId {
+ [self waitForCaptureChangeToFinish];
// check to see if the camera is already set
if (_captureSession) {
NSArray* currentInputs = [NSArray arrayWithArray:[_captureSession inputs]];
@@ -87,6 +124,7 @@ using namespace webrtc::videocapturemodule;
}
- (BOOL)startCaptureWithCapability:(const VideoCaptureCapability&)capability {
+ [self waitForCaptureChangeToFinish];
if (!_captureSession) {
return NO;
}
@@ -121,11 +159,25 @@ using namespace webrtc::videocapturemodule;
_capability = capability;
- NSArray* currentOutputs = [_captureSession outputs];
- if ([currentOutputs count] == 0) {
+ AVCaptureVideoDataOutput* currentOutput = [self currentOutput];
+ if (!currentOutput)
return NO;
- }
+ [self directOutputToSelf];
+
+ _captureChanging = YES;
+ dispatch_async(
+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
+ ^(void) { [self startCaptureInBackgroundWithOutput:currentOutput]; });
+ return YES;
+}
+
+- (AVCaptureVideoDataOutput*)currentOutput {
+ return [[_captureSession outputs] firstObject];
+}
+
+- (void)startCaptureInBackgroundWithOutput:
+ (AVCaptureVideoDataOutput*)currentOutput {
NSString* captureQuality =
[NSString stringWithString:AVCaptureSessionPresetLow];
if (_capability.width >= 1920 || _capability.height >= 1080) {
@@ -139,9 +191,6 @@ using namespace webrtc::videocapturemodule;
captureQuality = [NSString stringWithString:AVCaptureSessionPreset352x288];
}
- AVCaptureVideoDataOutput* currentOutput =
- (AVCaptureVideoDataOutput*)[currentOutputs objectAtIndex:0];
-
// begin configuration for the AVCaptureSession
[_captureSession beginConfiguration];
@@ -149,25 +198,42 @@ using namespace webrtc::videocapturemodule;
[_captureSession setSessionPreset:captureQuality];
// take care of capture framerate now
- AVCaptureConnection* connection =
- [currentOutput connectionWithMediaType:AVMediaTypeVideo];
-
+ _connection = [currentOutput connectionWithMediaType:AVMediaTypeVideo];
+ [self setRelativeVideoOrientation];
CMTime cm_time = {1, _capability.maxFPS, kCMTimeFlags_Valid, 0};
- [connection setVideoMinFrameDuration:cm_time];
- [connection setVideoMaxFrameDuration:cm_time];
+ [_connection setVideoMinFrameDuration:cm_time];
+ [_connection setVideoMaxFrameDuration:cm_time];
// finished configuring, commit settings to AVCaptureSession.
[_captureSession commitConfiguration];
[_captureSession startRunning];
+ [self signalCaptureChangeEnd];
+}
- [captureQuality release];
-
- return YES;
+- (void)setRelativeVideoOrientation {
+ if (!_connection.supportsVideoOrientation)
+ return;
+ switch ([UIApplication sharedApplication].statusBarOrientation) {
+ case UIInterfaceOrientationPortrait:
+ _connection.videoOrientation = AVCaptureVideoOrientationPortrait;
+ break;
+ case UIInterfaceOrientationPortraitUpsideDown:
+ _connection.videoOrientation =
+ AVCaptureVideoOrientationPortraitUpsideDown;
+ break;
+ case UIInterfaceOrientationLandscapeLeft:
+ _connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
+ break;
+ case UIInterfaceOrientationLandscapeRight:
+ _connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
+ break;
+ }
}
-- (void)onVideoError {
+- (void)onVideoError:(NSNotification*)notification {
+ NSLog(@"onVideoError: %@", notification);
// TODO(sjlee): make the specific error handling with this notification.
WEBRTC_TRACE(kTraceError,
kTraceVideoCapture,
@@ -179,16 +245,26 @@ using namespace webrtc::videocapturemodule;
}
- (BOOL)stopCapture {
+ [self waitForCaptureChangeToFinish];
+ [self directOutputToNil];
+
if (!_captureSession) {
return NO;
}
- [_captureSession stopRunning];
-
+ _captureChanging = YES;
+ dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
+ ^(void) { [self stopCaptureInBackground]; });
return YES;
}
+- (void)stopCaptureInBackground {
+ [_captureSession stopRunning];
+ [self signalCaptureChangeEnd];
+}
+
- (BOOL)changeCaptureInputByUniqueId:(NSString*)uniqueId {
+ [self waitForCaptureChangeToFinish];
NSArray* currentInputs = [_captureSession inputs];
// remove current input
if ([currentInputs count] > 0) {
@@ -284,4 +360,18 @@ using namespace webrtc::videocapturemodule;
CVPixelBufferUnlockBaseAddress(videoFrame, kFlags);
}
+- (void)signalCaptureChangeEnd {
+ [_captureChangingCondition lock];
+ _captureChanging = NO;
+ [_captureChangingCondition signal];
+ [_captureChangingCondition unlock];
+}
+
+- (void)waitForCaptureChangeToFinish {
+ [_captureChangingCondition lock];
+ while (_captureChanging) {
+ [_captureChangingCondition wait];
+ }
+ [_captureChangingCondition unlock];
+}
@end
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.h b/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.h
index 5d7e4b35725..ff8345f26ee 100644
--- a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.h
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.h
@@ -13,7 +13,7 @@
#include "webrtc/modules/video_capture/video_capture_impl.h"
-@class VideoCaptureIosObjC;
+@class RTCVideoCaptureIosObjC;
namespace webrtc {
namespace videocapturemodule {
@@ -33,7 +33,7 @@ class VideoCaptureIos : public VideoCaptureImpl {
virtual int32_t CaptureSettings(VideoCaptureCapability& settings) OVERRIDE;
private:
- VideoCaptureIosObjC* capture_device_;
+ RTCVideoCaptureIosObjC* capture_device_;
bool is_capturing_;
int32_t id_;
VideoCaptureCapability capability_;
diff --git a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.mm b/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.mm
index bb576c3fcdf..2010f03080c 100644
--- a/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.mm
+++ b/chromium/third_party/webrtc/modules/video_capture/ios/video_capture_ios.mm
@@ -8,8 +8,12 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_capture/ios/device_info_ios_objc.h"
-#include "webrtc/modules/video_capture/ios/video_capture_ios_objc.h"
+#include "webrtc/modules/video_capture/ios/rtc_video_capture_ios_objc.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/scoped_refptr.h"
#include "webrtc/system_wrappers/interface/trace.h"
@@ -30,7 +34,7 @@ VideoCaptureIos::VideoCaptureIos(const int32_t capture_id)
}
VideoCaptureIos::~VideoCaptureIos() {
- if (capture_device_) {
+ if (is_capturing_) {
[capture_device_ stopCapture];
}
}
@@ -53,8 +57,8 @@ VideoCaptureModule* VideoCaptureIos::Create(const int32_t capture_id,
capture_module->_deviceUniqueId[name_length] = '\0';
capture_module->capture_device_ =
- [[VideoCaptureIosObjC alloc] initWithOwner:capture_module
- captureId:capture_module->id_];
+ [[RTCVideoCaptureIosObjC alloc] initWithOwner:capture_module
+ captureId:capture_module->id_];
if (!capture_module->capture_device_) {
return NULL;
}
@@ -86,7 +90,6 @@ int32_t VideoCaptureIos::StopCapture() {
}
is_capturing_ = false;
-
return 0;
}
diff --git a/chromium/third_party/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm b/chromium/third_party/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm
index 7b46aec1925..2b18e1eca17 100644
--- a/chromium/third_party/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm
+++ b/chromium/third_party/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm
@@ -155,11 +155,14 @@ using namespace webrtc;
- (void)checkOSSupported
{
Class osSupportedTest = NSClassFromString(@"QTCaptureSession");
- _OSSupportedInfo = NO;
if(nil == osSupportedTest)
{
+ _OSSupportedInfo = NO;
+ }
+ else
+ {
+ _OSSupportedInfo = YES;
}
- _OSSupportedInfo = YES;
}
/// ***** Retrieves the number of capture devices currently available
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi b/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
index b8a63c9c4a6..dced22ce08d 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture.gypi
@@ -60,6 +60,7 @@
'link_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
+ '-framework CoreVideo',
'-framework QTKit',
],
},
@@ -108,11 +109,14 @@
'ios/device_info_ios.mm',
'ios/device_info_ios_objc.h',
'ios/device_info_ios_objc.mm',
+ 'ios/rtc_video_capture_ios_objc.h',
+ 'ios/rtc_video_capture_ios_objc.mm',
'ios/video_capture_ios.h',
'ios/video_capture_ios.mm',
- 'ios/video_capture_ios_objc.h',
- 'ios/video_capture_ios_objc.mm',
],
+ 'xcode_settings': {
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ },
'all_dependent_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
@@ -130,11 +134,20 @@
},
],
'conditions': [
+ ['include_tests==1 and build_with_chromium==1 and OS=="android"', {
+ # Use WebRTC capture code for Android APK tests that are built from a
+ # Chromium checkout. Normally when built as a part of Chromium the
+ # Chromium video capture code is used. This overrides the default in
+ # webrtc/build/common.gypi.
+ 'variables': {
+ 'include_internal_video_capture': 1,
+ },
+ }],
['include_tests==1', {
'targets': [
{
'target_name': 'video_capture_tests',
- 'type': 'executable',
+ 'type': '<(gtest_target_type)',
'dependencies': [
'video_capture_module',
'webrtc_utility',
@@ -142,6 +155,8 @@
'<(DEPTH)/testing/gtest.gyp:gtest',
],
'sources': [
+ 'ensure_initialized.cc',
+ 'ensure_initialized.h',
'test/video_capture_unittest.cc',
'test/video_capture_main_mac.mm',
],
@@ -161,6 +176,13 @@
'-lX11',
],
}],
+ # TODO(henrike): remove build_with_chromium==1 when the bots are
+ # using Chromium's buildbots.
+ ['build_with_chromium==1 and OS=="android"', {
+ 'dependencies': [
+ '<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
+ ],
+ }],
['OS=="mac"', {
'dependencies': [
# Link with a special main for mac so we can use the webcam.
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
index 6689fd132c7..6f179e2da66 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.cc
@@ -17,9 +17,9 @@
#include "webrtc/modules/video_capture/video_capture_config.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc
@@ -187,45 +187,33 @@ VideoCaptureImpl::~VideoCaptureImpl()
delete[] _deviceUniqueId;
}
-int32_t VideoCaptureImpl::RegisterCaptureDataCallback(
- VideoCaptureDataCallback& dataCallBack)
-{
+void VideoCaptureImpl::RegisterCaptureDataCallback(
+ VideoCaptureDataCallback& dataCallBack) {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_dataCallBack = &dataCallBack;
-
- return 0;
}
-int32_t VideoCaptureImpl::DeRegisterCaptureDataCallback()
-{
+void VideoCaptureImpl::DeRegisterCaptureDataCallback() {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_dataCallBack = NULL;
- return 0;
}
-int32_t VideoCaptureImpl::RegisterCaptureCallback(VideoCaptureFeedBack& callBack)
-{
+void VideoCaptureImpl::RegisterCaptureCallback(VideoCaptureFeedBack& callBack) {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_captureCallBack = &callBack;
- return 0;
}
-int32_t VideoCaptureImpl::DeRegisterCaptureCallback()
-{
+void VideoCaptureImpl::DeRegisterCaptureCallback() {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_captureCallBack = NULL;
- return 0;
-
}
-int32_t VideoCaptureImpl::SetCaptureDelay(int32_t delayMS)
-{
+void VideoCaptureImpl::SetCaptureDelay(int32_t delayMS) {
CriticalSectionScoped cs(&_apiCs);
_captureDelay = delayMS;
- return 0;
}
int32_t VideoCaptureImpl::CaptureDelay()
{
@@ -272,13 +260,8 @@ int32_t VideoCaptureImpl::IncomingFrame(
const VideoCaptureCapability& frameInfo,
int64_t captureTime/*=0*/)
{
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideoCapture, _id,
- "IncomingFrame width %d, height %d", (int) frameInfo.width,
- (int) frameInfo.height);
-
- TickTime startProcessTime = TickTime::Now();
-
- CriticalSectionScoped cs(&_callBackCs);
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
const int32_t width = frameInfo.width;
const int32_t height = frameInfo.height;
@@ -295,8 +278,7 @@ int32_t VideoCaptureImpl::IncomingFrame(
CalcBufferSize(commonVideoType, width,
abs(height)) != videoFrameLength)
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "Wrong incoming frame length.");
+ LOG(LS_ERROR) << "Wrong incoming frame length.";
return -1;
}
@@ -320,8 +302,8 @@ int32_t VideoCaptureImpl::IncomingFrame(
stride_uv, stride_uv);
if (ret < 0)
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "Failed to allocate I420 frame.");
+ LOG(LS_ERROR) << "Failed to create empty frame, this should only "
+ "happen due to bad parameters.";
return -1;
}
const int conversionResult = ConvertToI420(commonVideoType,
@@ -333,9 +315,8 @@ int32_t VideoCaptureImpl::IncomingFrame(
&_captureFrame);
if (conversionResult < 0)
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
- "Failed to convert capture frame from type %d to I420",
- frameInfo.rawType);
+ LOG(LS_ERROR) << "Failed to convert capture frame from type "
+ << frameInfo.rawType << " to I420.";
return -1;
}
DeliverCapturedFrame(_captureFrame, captureTime);
@@ -346,22 +327,14 @@ int32_t VideoCaptureImpl::IncomingFrame(
return -1;
}
- const uint32_t processTime =
- (uint32_t)(TickTime::Now() - startProcessTime).Milliseconds();
- if (processTime > 10) // If the process time is too long MJPG will not work well.
- {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
- "Too long processing time of Incoming frame: %ums",
- (unsigned int) processTime);
- }
-
return 0;
}
int32_t VideoCaptureImpl::IncomingI420VideoFrame(I420VideoFrame* video_frame,
int64_t captureTime) {
- CriticalSectionScoped cs(&_callBackCs);
+ CriticalSectionScoped cs(&_apiCs);
+ CriticalSectionScoped cs2(&_callBackCs);
DeliverCapturedFrame(*video_frame, captureTime);
return 0;
@@ -389,8 +362,7 @@ int32_t VideoCaptureImpl::SetCaptureRotation(VideoCaptureRotation rotation) {
return 0;
}
-int32_t VideoCaptureImpl::EnableFrameRateCallback(const bool enable)
-{
+void VideoCaptureImpl::EnableFrameRateCallback(const bool enable) {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_frameRateCallBack = enable;
@@ -398,15 +370,12 @@ int32_t VideoCaptureImpl::EnableFrameRateCallback(const bool enable)
{
_lastFrameRateCallbackTime = TickTime::Now();
}
- return 0;
}
-int32_t VideoCaptureImpl::EnableNoPictureAlarm(const bool enable)
-{
+void VideoCaptureImpl::EnableNoPictureAlarm(const bool enable) {
CriticalSectionScoped cs(&_apiCs);
CriticalSectionScoped cs2(&_callBackCs);
_noPictureAlarmCallBack = enable;
- return 0;
}
void VideoCaptureImpl::UpdateFrameCount()
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
index 80d5e67862a..f3a4c64cbd6 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture_impl.h
@@ -62,17 +62,18 @@ public:
virtual int32_t ChangeUniqueId(const int32_t id);
//Call backs
- virtual int32_t RegisterCaptureDataCallback(VideoCaptureDataCallback& dataCallback);
- virtual int32_t DeRegisterCaptureDataCallback();
- virtual int32_t RegisterCaptureCallback(VideoCaptureFeedBack& callBack);
- virtual int32_t DeRegisterCaptureCallback();
+ virtual void RegisterCaptureDataCallback(
+ VideoCaptureDataCallback& dataCallback);
+ virtual void DeRegisterCaptureDataCallback();
+ virtual void RegisterCaptureCallback(VideoCaptureFeedBack& callBack);
+ virtual void DeRegisterCaptureCallback();
- virtual int32_t SetCaptureDelay(int32_t delayMS);
+ virtual void SetCaptureDelay(int32_t delayMS);
virtual int32_t CaptureDelay();
virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation);
- virtual int32_t EnableFrameRateCallback(const bool enable);
- virtual int32_t EnableNoPictureAlarm(const bool enable);
+ virtual void EnableFrameRateCallback(const bool enable);
+ virtual void EnableNoPictureAlarm(const bool enable);
virtual const char* CurrentDeviceName() const;
diff --git a/chromium/third_party/webrtc/modules/video_capture/video_capture_tests.isolate b/chromium/third_party/webrtc/modules/video_capture/video_capture_tests.isolate
index 30374ce4e4a..57dd66739fd 100644
--- a/chromium/third_party/webrtc/modules/video_capture/video_capture_tests.isolate
+++ b/chromium/third_party/webrtc/modules/video_capture/video_capture_tests.isolate
@@ -8,27 +8,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../../data/',
- '../../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/video_capture_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/video_capture_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],
diff --git a/chromium/third_party/webrtc/modules/video_capture/windows/sink_filter_ds.cc b/chromium/third_party/webrtc/modules/video_capture/windows/sink_filter_ds.cc
index 144f8833b75..2edbe59c4aa 100644
--- a/chromium/third_party/webrtc/modules/video_capture/windows/sink_filter_ds.cc
+++ b/chromium/third_party/webrtc/modules/video_capture/windows/sink_filter_ds.cc
@@ -437,6 +437,7 @@ CaptureSinkFilter::GetPin(IN int Index)
STDMETHODIMP CaptureSinkFilter::Pause()
{
+ LockReceive();
LockFilter();
if (m_State == State_Stopped)
{
@@ -456,6 +457,7 @@ STDMETHODIMP CaptureSinkFilter::Pause()
m_State = State_Paused;
}
UnlockFilter();
+ UnlockReceive();
return S_OK;
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/i420/main/source/OWNERS b/chromium/third_party/webrtc/modules/video_coding/codecs/i420/main/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/i420/main/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/OWNERS b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc
index 9eda36e8439..dcd74790c29 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.cc
@@ -26,14 +26,12 @@ using namespace webrtc;
NormalAsyncTest::NormalAsyncTest()
:
-NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
- _testNo),
+NormalTest("Async Normal Test 1", "A test of normal execution of the codec", 1),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
-_testNo(1),
_appendNext(false),
_missingFrames(false),
_rttFrames(0),
@@ -47,13 +45,13 @@ _waitForKey(false)
NormalAsyncTest::NormalAsyncTest(uint32_t bitRate)
:
NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
- bitRate, _testNo),
+ bitRate,
+ 1),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
-_testNo(1),
_appendNext(false),
_missingFrames(false),
_rttFrames(0),
@@ -67,13 +65,12 @@ _waitForKey(false)
NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
unsigned int testNo)
:
-NormalTest(name, description, _testNo),
+NormalTest(name, description, testNo),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
-_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_missingFrames(false),
@@ -88,13 +85,12 @@ _waitForKey(false)
NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
uint32_t bitRate, unsigned int testNo)
:
-NormalTest(name, description, bitRate, _testNo),
+NormalTest(name, description, bitRate, testNo),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
-_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_missingFrames(false),
@@ -110,13 +106,12 @@ NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
uint32_t bitRate, unsigned int testNo,
unsigned int rttFrames)
:
-NormalTest(name, description, bitRate, _testNo),
+NormalTest(name, description, bitRate, testNo),
_decodeCompleteTime(0),
_encodeCompleteTime(0),
_encFrameCnt(0),
_decFrameCnt(0),
_requestKeyFrame(false),
-_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false),
_missingFrames(false),
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h
index d2d17eebc7f..1e62534acab 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/normal_async_test.h
@@ -118,7 +118,6 @@ protected:
int _encFrameCnt;
int _decFrameCnt;
bool _requestKeyFrame;
- unsigned int _testNo;
unsigned int _lengthEncFrame;
FrameQueueTuple* _frameToDecode;
bool _appendNext;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/test.h b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/test.h
index 890d0cb50cb..7558abe6bbf 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/test.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/test.h
@@ -41,7 +41,7 @@ protected:
virtual void Teardown();
double ActualBitRate(int nFrames);
virtual bool PacketLoss(double lossRate, int /*thrown*/);
- static double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
+ static double RandUniform() { return (rand() + 1.0)/(RAND_MAX + 1.0); }
static void VideoEncodedBufferToEncodedImage(
webrtc::VideoFrame& videoBuffer,
webrtc::EncodedImage &image);
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc
index 3b034e01c60..ec12a51693c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/test_framework/unit_test.cc
@@ -565,7 +565,7 @@ UnitTest::Perform()
frameLength = WaitForDecodedFrame();
}
unsigned int length = CalcBufferSize(kI420, width, height);
- scoped_array<uint8_t> decoded_buffer(new uint8_t[length]);
+ scoped_ptr<uint8_t[]> decoded_buffer(new uint8_t[length]);
ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame,
decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength, _refDecFrame,
@@ -645,7 +645,7 @@ UnitTest::Perform()
// check that decoded frame matches with reference
unsigned int length = CalcBufferSize(kI420, width, height);
- scoped_array<uint8_t> decoded_buffer(new uint8_t[length]);
+ scoped_ptr<uint8_t[]> decoded_buffer(new uint8_t[length]);
ExtractBuffer(_decodedVideoBuffer, length, decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), length,
_refDecFrame, _lengthSourceFrame) == true);
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/tools/OWNERS b/chromium/third_party/webrtc/modules/video_coding/codecs/tools/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/tools/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/tools/video_codecs_tools.gypi b/chromium/third_party/webrtc/modules/video_coding/codecs/tools/video_codecs_tools.gypi
index cdae0afeaad..8f15b28504d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/tools/video_codecs_tools.gypi
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/tools/video_codecs_tools.gypi
@@ -17,6 +17,7 @@
'video_codecs_test_framework',
'webrtc_video_coding',
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
'<(webrtc_root)/test/metrics.gyp:metrics',
'<(webrtc_vp8_dir)/vp8.gyp:webrtc_vp8',
],
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/OWNERS b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8.gyp b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
index b3859a5978d..621c244cdfe 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
@@ -31,6 +31,7 @@
'reference_picture_selection.cc',
'include/vp8.h',
'include/vp8_common_types.h',
+ 'vp8_factory.cc',
'vp8_impl.cc',
'default_temporal_layers.cc',
'default_temporal_layers.h',
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_factory.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_factory.cc
new file mode 100644
index 00000000000..995191e3c04
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_factory.cc
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#include "webrtc/modules/video_coding/codecs/vp8/vp8_impl.h"
+
+namespace webrtc {
+
+VP8Encoder* VP8Encoder::Create() {
+ return new VP8EncoderImpl();
+}
+
+VP8Decoder* VP8Decoder::Create() {
+ return new VP8DecoderImpl();
+}
+
+} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
index 910a5f3f8bb..4901edff3d5 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.cc
@@ -35,10 +35,6 @@ enum { kVp8ErrorPropagationTh = 30 };
namespace webrtc {
-VP8Encoder* VP8Encoder::Create() {
- return new VP8EncoderImpl();
-}
-
VP8EncoderImpl::VP8EncoderImpl()
: encoded_image_(),
encoded_complete_callback_(NULL),
@@ -218,7 +214,10 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
}
config_->g_lag_in_frames = 0; // 0- no frame lagging
- if (codec_.width * codec_.height > 1280 * 960 && number_of_cores >= 6) {
+ if (codec_.width * codec_.height >= 1920 * 1080 && number_of_cores > 8) {
+ config_->g_threads = 8; // 8 threads for 1080p on high perf machines.
+ } else if (codec_.width * codec_.height > 1280 * 960 &&
+ number_of_cores >= 6) {
config_->g_threads = 3; // 3 threads for 1080p.
} else if (codec_.width * codec_.height > 640 * 480 && number_of_cores >= 3) {
config_->g_threads = 2; // 2 threads for qHD/HD.
@@ -477,8 +476,8 @@ int VP8EncoderImpl::GetEncodedPartitions(const I420VideoFrame& input_image) {
TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_._length);
encoded_image_._timeStamp = input_image.timestamp();
encoded_image_.capture_time_ms_ = input_image.render_time_ms();
- encoded_image_._encodedHeight = raw_->h;
- encoded_image_._encodedWidth = raw_->w;
+ encoded_image_._encodedHeight = codec_.height;
+ encoded_image_._encodedWidth = codec_.width;
encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
&frag_info);
}
@@ -496,10 +495,6 @@ int VP8EncoderImpl::RegisterEncodeCompleteCallback(
return WEBRTC_VIDEO_CODEC_OK;
}
-VP8Decoder* VP8Decoder::Create() {
- return new VP8DecoderImpl();
-}
-
VP8DecoderImpl::VP8DecoderImpl()
: decode_complete_callback_(NULL),
inited_(false),
@@ -718,7 +713,7 @@ int VP8DecoderImpl::Decode(const EncodedImage& input_image,
}
img = vpx_codec_get_frame(decoder_, &iter);
- ret = ReturnFrame(img, input_image._timeStamp);
+ ret = ReturnFrame(img, input_image._timeStamp, input_image.ntp_time_ms_);
if (ret != 0) {
// Reset to avoid requesting key frames too often.
if (ret < 0 && propagation_cnt_ > 0)
@@ -798,7 +793,9 @@ int VP8DecoderImpl::DecodePartitions(
return WEBRTC_VIDEO_CODEC_OK;
}
-int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
+int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
+ uint32_t timestamp,
+ int64_t ntp_time_ms) {
if (img == NULL) {
// Decoder OK and NULL image => No show frame
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -816,6 +813,7 @@ int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
img->stride[VPX_PLANE_U],
img->stride[VPX_PLANE_V]);
decoded_image_.set_timestamp(timestamp);
+ decoded_image_.set_ntp_time_ms(ntp_time_ms);
int ret = decode_complete_callback_->Decoded(decoded_image_);
if (ret != 0)
return ret;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
index 26dd52e6a93..56f7219fc1a 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_impl.h
@@ -214,7 +214,9 @@ class VP8DecoderImpl : public VP8Decoder {
int DecodePartitions(const EncodedImage& input_image,
const RTPFragmentationHeader* fragmentation);
- int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);
+ int ReturnFrame(const vpx_image_t* img,
+ uint32_t timeStamp,
+ int64_t ntp_time_ms);
I420VideoFrame decoded_image_;
DecodedImageCallback* decode_complete_callback_;
diff --git a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
index 1bd3e1a6238..ffa0bcc681f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/codecs/vp8/vp8_sequence_coder.cc
@@ -142,7 +142,7 @@ int SequenceCoder(webrtc::test::CommandLineParser& parser) {
EXPECT_EQ(0, decoder->InitDecode(&inst, 1));
webrtc::I420VideoFrame input_frame;
unsigned int length = webrtc::CalcBufferSize(webrtc::kI420, width, height);
- webrtc::scoped_array<uint8_t> frame_buffer(new uint8_t[length]);
+ webrtc::scoped_ptr<uint8_t[]> frame_buffer(new uint8_t[length]);
int half_width = (width + 1) / 2;
// Set and register callbacks.
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/interface/video_coding.h b/chromium/third_party/webrtc/modules/video_coding/main/interface/video_coding.h
index c0166761e83..cad0e5ab879 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/interface/video_coding.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/interface/video_coding.h
@@ -72,11 +72,9 @@ public:
kReferenceSelection
};
- static VideoCodingModule* Create(const int32_t id);
+ static VideoCodingModule* Create();
- static VideoCodingModule* Create(const int32_t id,
- Clock* clock,
- EventFactory* event_factory);
+ static VideoCodingModule* Create(Clock* clock, EventFactory* event_factory);
static void Destroy(VideoCodingModule* module);
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/Android.mk b/chromium/third_party/webrtc/modules/video_coding/main/source/Android.mk
index 9ebdbed9a66..a8cf2d0e64c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/Android.mk
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/Android.mk
@@ -37,7 +37,6 @@ LOCAL_SRC_FILES := \
receiver.cc \
rtt_filter.cc \
session_info.cc \
- timestamp_extrapolator.cc \
timestamp_map.cc \
timing.cc \
video_coding_impl.cc
@@ -56,7 +55,7 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/../../../../common_video/vplib/main/interface \
$(LOCAL_PATH)/../../../../common_video/interface \
$(LOCAL_PATH)/../../utility/include \
- $(LOCAL_PATH)/../../../../system_wrappers/interface
+ $(LOCAL_PATH)/../../../../system_wrappers/interface
LOCAL_SHARED_LIBRARIES := \
libcutils \
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/OWNERS b/chromium/third_party/webrtc/modules/video_coding/main/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.cc
index e41c6b42534..e7a9d91b138 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.cc
@@ -20,7 +20,7 @@
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#endif
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -42,9 +42,8 @@ VCMExtDecoderMapItem::VCMExtDecoderMapItem(
internal_render_timing(internal_render_timing) {
}
-VCMCodecDataBase::VCMCodecDataBase(int id)
- : id_(id),
- number_of_cores_(0),
+VCMCodecDataBase::VCMCodecDataBase()
+ : number_of_cores_(0),
max_payload_size_(kDefaultPayloadSize),
periodic_key_frames_(false),
pending_encoder_reset_(true),
@@ -58,8 +57,7 @@ VCMCodecDataBase::VCMCodecDataBase(int id)
ptr_decoder_(NULL),
current_dec_is_external_(false),
dec_map_(),
- dec_external_map_() {
-}
+ dec_external_map_() {}
VCMCodecDataBase::~VCMCodecDataBase() {
ResetSender();
@@ -160,7 +158,7 @@ bool VCMCodecDataBase::SetSendCodec(
if (max_payload_size <= 0) {
max_payload_size = kDefaultPayloadSize;
}
- if (number_of_cores <= 0 || number_of_cores > 32) {
+ if (number_of_cores <= 0) {
return false;
}
if (send_codec->plType <= 0) {
@@ -221,24 +219,14 @@ bool VCMCodecDataBase::SetSendCodec(
} else {
ptr_encoder_ = CreateEncoder(send_codec->codecType);
current_enc_is_external_ = false;
+ if (!ptr_encoder_) {
+ return false;
+ }
}
encoded_frame_callback->SetPayloadType(send_codec->plType);
- if (!ptr_encoder_) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(id_),
- "Failed to create encoder: %s.",
- send_codec->plName);
- return false;
- }
if (ptr_encoder_->InitEncode(send_codec,
number_of_cores_,
max_payload_size_) < 0) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(id_),
- "Failed to initialize encoder: %s.",
- send_codec->plName);
DeleteEncoder();
return false;
} else if (ptr_encoder_->RegisterEncodeCallback(encoded_frame_callback) < 0) {
@@ -257,8 +245,6 @@ bool VCMCodecDataBase::SetSendCodec(
}
bool VCMCodecDataBase::SendCodec(VideoCodec* current_send_codec) const {
- WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCoding, VCMId(id_),
- "SendCodec");
if (!ptr_encoder_) {
return false;
}
@@ -267,8 +253,6 @@ bool VCMCodecDataBase::SendCodec(VideoCodec* current_send_codec) const {
}
VideoCodecType VCMCodecDataBase::SendCodec() const {
- WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCoding, VCMId(id_),
- "SendCodec type");
if (!ptr_encoder_) {
return kVideoCodecUnknown;
}
@@ -338,12 +322,6 @@ bool VCMCodecDataBase::RequiresEncoderReset(const VideoCodec& new_send_codec) {
}
break;
case kVideoCodecGeneric:
- if (memcmp(&new_send_codec.codecSpecific.Generic,
- &send_codec_.codecSpecific.Generic,
- sizeof(new_send_codec.codecSpecific.Generic)) !=
- 0) {
- return true;
- }
break;
// Known codecs without payload-specifics
case kVideoCodecI420:
@@ -404,7 +382,11 @@ bool VCMCodecDataBase::DeregisterExternalDecoder(uint8_t payload_type) {
// Not found
return false;
}
- if (receive_codec_.plType == payload_type) {
+ // We can't use payload_type to check if the decoder is currently in use,
+ // because payload type may be out of date (e.g. before we decode the first
+ // frame after RegisterReceiveCodec)
+ if (ptr_decoder_ != NULL &&
+ &ptr_decoder_->_decoder == (*it).second->external_decoder_instance) {
// Release it if it was registered and in use.
ReleaseDecoder(ptr_decoder_);
ptr_decoder_ = NULL;
@@ -443,12 +425,6 @@ bool VCMCodecDataBase::RegisterReceiveCodec(
if (number_of_cores < 0) {
return false;
}
- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCoding, VCMId(id_),
- "Codec: %s, Payload type %d, Height %d, Width %d, Bitrate %d,"
- "Framerate %d.",
- receive_codec->plName, receive_codec->plType,
- receive_codec->height, receive_codec->width,
- receive_codec->startBitrate, receive_codec->maxFramerate);
// Check if payload value already exists, if so - erase old and insert new.
DeregisterReceiveCodec(receive_codec->plType);
if (receive_codec->codecType == kVideoCodecUnknown) {
@@ -530,7 +506,7 @@ VCMGenericDecoder* VCMCodecDataBase::CreateDecoderCopy() const {
if (!decoder_copy) {
return NULL;
}
- return new VCMGenericDecoder(*decoder_copy, id_, ptr_decoder_->External());
+ return new VCMGenericDecoder(*decoder_copy, ptr_decoder_->External());
}
void VCMCodecDataBase::ReleaseDecoder(VCMGenericDecoder* decoder) const {
@@ -549,8 +525,7 @@ void VCMCodecDataBase::CopyDecoder(const VCMGenericDecoder& decoder) {
if (decoder_copy) {
VCMDecodedFrameCallback* cb = ptr_decoder_->_callback;
ReleaseDecoder(ptr_decoder_);
- ptr_decoder_ = new VCMGenericDecoder(*decoder_copy, id_,
- decoder.External());
+ ptr_decoder_ = new VCMGenericDecoder(*decoder_copy, decoder.External());
if (cb && ptr_decoder_->RegisterDecodeCompleteCallback(cb)) {
assert(false);
}
@@ -575,8 +550,8 @@ VCMGenericDecoder* VCMCodecDataBase::CreateAndInitDecoder(
assert(new_codec);
const VCMDecoderMapItem* decoder_item = FindDecoderItem(payload_type);
if (!decoder_item) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(id_),
- "Unknown payload type: %u", payload_type);
+ LOG(LS_ERROR) << "Can't find a decoder associated with payload type: "
+ << payload_type;
return NULL;
}
VCMGenericDecoder* ptr_decoder = NULL;
@@ -585,7 +560,7 @@ VCMGenericDecoder* VCMCodecDataBase::CreateAndInitDecoder(
if (external_dec_item) {
// External codec.
ptr_decoder = new VCMGenericDecoder(
- *external_dec_item->external_decoder_instance, id_, true);
+ *external_dec_item->external_decoder_instance, true);
*external = true;
} else {
// Create decoder.
@@ -617,6 +592,7 @@ VCMGenericEncoder* VCMCodecDataBase::CreateEncoder(
return new VCMGenericEncoder(*(new I420Encoder));
#endif
default:
+ LOG(LS_WARNING) << "No internal encoder of this type exists.";
return NULL;
}
}
@@ -636,11 +612,11 @@ VCMGenericDecoder* VCMCodecDataBase::CreateDecoder(VideoCodecType type) const {
switch (type) {
#ifdef VIDEOCODEC_VP8
case kVideoCodecVP8:
- return new VCMGenericDecoder(*(VP8Decoder::Create()), id_);
+ return new VCMGenericDecoder(*(VP8Decoder::Create()));
#endif
#ifdef VIDEOCODEC_I420
case kVideoCodecI420:
- return new VCMGenericDecoder(*(new I420Decoder), id_);
+ return new VCMGenericDecoder(*(new I420Decoder));
#endif
default:
return NULL;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.h b/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.h
index 2a28ed4fb87..f27218f61cf 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/codec_database.h
@@ -50,7 +50,7 @@ struct VCMExtDecoderMapItem {
class VCMCodecDataBase {
public:
- explicit VCMCodecDataBase(int id);
+ VCMCodecDataBase();
~VCMCodecDataBase();
// Sender Side
@@ -174,7 +174,6 @@ class VCMCodecDataBase {
const VCMExtDecoderMapItem* FindExternalDecoderItem(
uint8_t payload_type) const;
- int id_;
int number_of_cores_;
int max_payload_size_;
bool periodic_key_frames_;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.cc
index 6760762c9a5..3ccf0b0fd99 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.cc
@@ -149,17 +149,12 @@ const RTPFragmentationHeader* VCMEncodedFrame::FragmentationHeader() const {
return &_fragmentation;
}
-int32_t
-VCMEncodedFrame::VerifyAndAllocate(const uint32_t minimumSize)
+void VCMEncodedFrame::VerifyAndAllocate(const uint32_t minimumSize)
{
if(minimumSize > _size)
{
// create buffer of sufficient size
uint8_t* newBuffer = new uint8_t[minimumSize];
- if (newBuffer == NULL)
- {
- return -1;
- }
if(_buffer)
{
// copy old data
@@ -169,7 +164,6 @@ VCMEncodedFrame::VerifyAndAllocate(const uint32_t minimumSize)
_buffer = newBuffer;
_size = minimumSize;
}
- return 0;
}
webrtc::FrameType VCMEncodedFrame::ConvertFrameType(VideoFrameType frameType)
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.h b/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.h
index 3e73be51803..dd0f843d267 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/encoded_frame.h
@@ -104,7 +104,7 @@ protected:
* is copied to the new buffer.
* Buffer size is updated to minimumSize.
*/
- int32_t VerifyAndAllocate(const uint32_t minimumSize);
+ void VerifyAndAllocate(const uint32_t minimumSize);
void Reset();
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/frame_buffer.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/frame_buffer.cc
index 531c7ac112d..fce68fb32d5 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/frame_buffer.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/frame_buffer.cc
@@ -14,6 +14,7 @@
#include <string.h>
#include "webrtc/modules/video_coding/main/source/packet.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -86,20 +87,7 @@ VCMFrameBuffer::InsertPacket(const VCMPacket& packet,
int64_t timeInMs,
VCMDecodeErrorMode decode_error_mode,
const FrameData& frame_data) {
- // Is this packet part of this frame?
- if (TimeStamp() && (TimeStamp() != packet.timestamp)) {
- return kTimeStampError;
- }
-
- // sanity checks
- if (_size + packet.sizeBytes +
- (packet.insertStartCode ? kH264StartCodeLengthBytes : 0 )
- > kMaxJBFrameSizeBytes) {
- return kSizeError;
- }
- if (NULL == packet.dataPtr && packet.sizeBytes > 0) {
- return kSizeError;
- }
+ assert(!(NULL == packet.dataPtr && packet.sizeBytes > 0));
if (packet.dataPtr != NULL) {
_payloadType = packet.payloadType;
}
@@ -108,6 +96,8 @@ VCMFrameBuffer::InsertPacket(const VCMPacket& packet,
// First packet (empty and/or media) inserted into this frame.
// store some info and set some initial values.
_timeStamp = packet.timestamp;
+ // We only take the ntp timestamp of the first packet of a frame.
+ ntp_time_ms_ = packet.ntp_time_ms_;
_codec = packet.codec;
if (packet.frameType != kFrameEmpty) {
// first media packet
@@ -126,11 +116,11 @@ VCMFrameBuffer::InsertPacket(const VCMPacket& packet,
const uint32_t newSize = _size +
increments * kBufferIncStepSizeBytes;
if (newSize > kMaxJBFrameSizeBytes) {
+ LOG(LS_ERROR) << "Failed to insert packet due to frame being too "
+ "big.";
return kSizeError;
}
- if (VerifyAndAllocate(newSize) == -1) {
- return kSizeError;
- }
+ VerifyAndAllocate(newSize);
_sessionInfo.UpdateDataPointers(prevBuffer, _buffer);
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.cc
index 50b1eda70fc..cb0faf9a901 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.cc
@@ -12,8 +12,7 @@
#include "webrtc/modules/video_coding/main/source/generic_decoder.h"
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-#include "webrtc/system_wrappers/interface/trace_event.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -59,10 +58,11 @@ int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage)
_timestampMap.Pop(decodedImage.timestamp()));
callback = _receiveCallback;
}
- if (frameInfo == NULL)
- {
- // The map should never be empty or full if this callback is called.
- return WEBRTC_VIDEO_CODEC_ERROR;
+
+ if (frameInfo == NULL) {
+ LOG(LS_WARNING) << "Too many frames backed up in the decoder, dropping "
+ "this one.";
+ return WEBRTC_VIDEO_CODEC_OK;
}
_timing.StopDecodeTimer(
@@ -73,14 +73,7 @@ int32_t VCMDecodedFrameCallback::Decoded(I420VideoFrame& decodedImage)
if (callback != NULL)
{
decodedImage.set_render_time_ms(frameInfo->renderTimeMs);
- int32_t callbackReturn = callback->FrameToRender(decodedImage);
- if (callbackReturn < 0)
- {
- WEBRTC_TRACE(webrtc::kTraceDebug,
- webrtc::kTraceVideoCoding,
- -1,
- "Render callback returned error: %d", callbackReturn);
- }
+ callback->FrameToRender(decodedImage);
}
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -125,15 +118,15 @@ int32_t VCMDecodedFrameCallback::Pop(uint32_t timestamp)
return VCM_OK;
}
-VCMGenericDecoder::VCMGenericDecoder(VideoDecoder& decoder, int32_t id, bool isExternal)
+VCMGenericDecoder::VCMGenericDecoder(VideoDecoder& decoder, bool isExternal)
:
-_id(id),
_callback(NULL),
_frameInfos(),
_nextFrameInfoIdx(0),
_decoder(decoder),
_codecType(kVideoCodecUnknown),
-_isExternal(isExternal)
+_isExternal(isExternal),
+_keyFrameDecoded(false)
{
}
@@ -156,11 +149,6 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame,
_frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
_callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]);
- WEBRTC_TRACE(webrtc::kTraceDebug,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Decoding timestamp %u", frame.TimeStamp());
-
_nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;
int32_t ret = _decoder.Decode(frame.EncodedImage(),
frame.MissingFrame(),
@@ -170,7 +158,8 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame,
if (ret < WEBRTC_VIDEO_CODEC_OK)
{
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(_id), "Decoder error: %d\n", ret);
+ LOG(LS_WARNING) << "Failed to decode frame with timestamp "
+ << frame.TimeStamp() << ", error code: " << ret;
_callback->Pop(frame.TimeStamp());
return ret;
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.h b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.h
index e1993fbb906..846d4d3e111 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_decoder.h
@@ -63,7 +63,7 @@ class VCMGenericDecoder
{
friend class VCMCodecDataBase;
public:
- VCMGenericDecoder(VideoDecoder& decoder, int32_t id = 0, bool isExternal = false);
+ VCMGenericDecoder(VideoDecoder& decoder, bool isExternal = false);
~VCMGenericDecoder();
/**
@@ -105,17 +105,14 @@ public:
bool External() const;
-protected:
-
- int32_t _id;
+private:
VCMDecodedFrameCallback* _callback;
VCMFrameInformation _frameInfos[kDecoderFrameMemoryLength];
- uint32_t _nextFrameInfoIdx;
+ uint32_t _nextFrameInfoIdx;
VideoDecoder& _decoder;
VideoCodecType _codecType;
bool _isExternal;
bool _keyFrameDecoded;
-
};
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.cc
index 064470b1667..6fb2c9f81b7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.cc
@@ -13,8 +13,42 @@
#include "webrtc/modules/video_coding/main/source/generic_encoder.h"
#include "webrtc/modules/video_coding/main/source/media_optimization.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
+namespace {
+// Map information from info into rtp. If no relevant information is found
+// in info, rtp is set to NULL.
+void CopyCodecSpecific(const CodecSpecificInfo* info, RTPVideoHeader** rtp) {
+ if (!info) {
+ *rtp = NULL;
+ return;
+ }
+ switch (info->codecType) {
+ case kVideoCodecVP8: {
+ (*rtp)->codec = kRtpVideoVp8;
+ (*rtp)->codecHeader.VP8.InitRTPVideoHeaderVP8();
+ (*rtp)->codecHeader.VP8.pictureId = info->codecSpecific.VP8.pictureId;
+ (*rtp)->codecHeader.VP8.nonReference =
+ info->codecSpecific.VP8.nonReference;
+ (*rtp)->codecHeader.VP8.temporalIdx = info->codecSpecific.VP8.temporalIdx;
+ (*rtp)->codecHeader.VP8.layerSync = info->codecSpecific.VP8.layerSync;
+ (*rtp)->codecHeader.VP8.tl0PicIdx = info->codecSpecific.VP8.tl0PicIdx;
+ (*rtp)->codecHeader.VP8.keyIdx = info->codecSpecific.VP8.keyIdx;
+ (*rtp)->simulcastIdx = info->codecSpecific.VP8.simulcastIdx;
+ return;
+ }
+ case kVideoCodecGeneric:
+ (*rtp)->codec = kRtpVideoGeneric;
+ (*rtp)->simulcastIdx = info->codecSpecific.generic.simulcast_idx;
+ return;
+ default:
+ // No codec specific info. Change RTP header pointer to NULL.
+ *rtp = NULL;
+ return;
+ }
+}
+} // namespace
//#define DEBUG_ENCODER_BIT_STREAM
@@ -50,11 +84,12 @@ VCMGenericEncoder::InitEncode(const VideoCodec* settings,
_bitRate = settings->startBitrate * 1000;
_frameRate = settings->maxFramerate;
_codecType = settings->codecType;
- if (_VCMencodedFrameCallback != NULL)
- {
- _VCMencodedFrameCallback->SetCodecType(_codecType);
+ if (_encoder.InitEncode(settings, numberOfCores, maxPayloadSize) != 0) {
+ LOG(LS_ERROR) << "Failed to initialize the encoder associated with "
+ "payload name: " << settings->plName;
+ return -1;
}
- return _encoder.InitEncode(settings, numberOfCores, maxPayloadSize);
+ return 0;
}
int32_t
@@ -127,8 +162,6 @@ int32_t
VCMGenericEncoder::RegisterEncodeCallback(VCMEncodedFrameCallback* VCMencodedFrameCallback)
{
_VCMencodedFrameCallback = VCMencodedFrameCallback;
-
- _VCMencodedFrameCallback->SetCodecType(_codecType);
_VCMencodedFrameCallback->SetInternalSource(_internalSource);
return _encoder.RegisterEncodeCompleteCallback(_VCMencodedFrameCallback);
}
@@ -142,15 +175,13 @@ VCMGenericEncoder::InternalSource() const
/***************************
* Callback Implementation
***************************/
-VCMEncodedFrameCallback::VCMEncodedFrameCallback():
+VCMEncodedFrameCallback::VCMEncodedFrameCallback(
+ EncodedImageCallback* post_encode_callback):
_sendCallback(),
_mediaOpt(NULL),
-_encodedBytes(0),
_payloadType(0),
-_codecType(kVideoCodecUnknown),
_internalSource(false),
-post_encode_callback_lock_(CriticalSectionWrapper::CreateCriticalSection()),
-post_encode_callback_(NULL)
+post_encode_callback_(post_encode_callback)
#ifdef DEBUG_ENCODER_BIT_STREAM
, _bitStreamAfterEncoder(NULL)
#endif
@@ -180,12 +211,8 @@ VCMEncodedFrameCallback::Encoded(
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentationHeader)
{
- {
- CriticalSectionScoped cs(post_encode_callback_lock_.get());
- if (post_encode_callback_) {
- post_encode_callback_->Encoded(encodedImage);
- }
- }
+ post_encode_callback_->Encoded(encodedImage);
+
FrameType frameType = VCMEncodedFrame::ConvertFrameType(encodedImage._frameType);
uint32_t encodedBytes = 0;
@@ -202,14 +229,7 @@ VCMEncodedFrameCallback::Encoded(
RTPVideoHeader rtpVideoHeader;
RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader;
- if (codecSpecificInfo)
- {
- CopyCodecSpecific(*codecSpecificInfo, &rtpVideoHeaderPtr);
- }
- else
- {
- rtpVideoHeaderPtr = NULL;
- }
+ CopyCodecSpecific(codecSpecificInfo, &rtpVideoHeaderPtr);
int32_t callbackReturn = _sendCallback->SendData(
frameType,
@@ -229,9 +249,8 @@ VCMEncodedFrameCallback::Encoded(
{
return VCM_UNINITIALIZED;
}
- _encodedBytes = encodedBytes;
if (_mediaOpt != NULL) {
- _mediaOpt->UpdateWithEncodedData(_encodedBytes, encodedImage._timeStamp,
+ _mediaOpt->UpdateWithEncodedData(encodedBytes, encodedImage._timeStamp,
frameType);
if (_internalSource)
{
@@ -241,12 +260,6 @@ VCMEncodedFrameCallback::Encoded(
return VCM_OK;
}
-uint32_t
-VCMEncodedFrameCallback::EncodedBytes()
-{
- return _encodedBytes;
-}
-
void
VCMEncodedFrameCallback::SetMediaOpt(
media_optimization::MediaOptimization *mediaOpt)
@@ -254,36 +267,4 @@ VCMEncodedFrameCallback::SetMediaOpt(
_mediaOpt = mediaOpt;
}
-void VCMEncodedFrameCallback::CopyCodecSpecific(const CodecSpecificInfo& info,
- RTPVideoHeader** rtp) {
- switch (info.codecType) {
- case kVideoCodecVP8: {
- (*rtp)->codec = kRtpVideoVp8;
- (*rtp)->codecHeader.VP8.InitRTPVideoHeaderVP8();
- (*rtp)->codecHeader.VP8.pictureId = info.codecSpecific.VP8.pictureId;
- (*rtp)->codecHeader.VP8.nonReference =
- info.codecSpecific.VP8.nonReference;
- (*rtp)->codecHeader.VP8.temporalIdx = info.codecSpecific.VP8.temporalIdx;
- (*rtp)->codecHeader.VP8.layerSync = info.codecSpecific.VP8.layerSync;
- (*rtp)->codecHeader.VP8.tl0PicIdx = info.codecSpecific.VP8.tl0PicIdx;
- (*rtp)->codecHeader.VP8.keyIdx = info.codecSpecific.VP8.keyIdx;
- (*rtp)->simulcastIdx = info.codecSpecific.VP8.simulcastIdx;
- return;
- }
- case kVideoCodecGeneric:
- (*rtp)->codec = kRtpVideoGeneric;
- (*rtp)->simulcastIdx = info.codecSpecific.generic.simulcast_idx;
- return;
- default:
- // No codec specific info. Change RTP header pointer to NULL.
- *rtp = NULL;
- return;
- }
-}
-
-void VCMEncodedFrameCallback::RegisterPostEncodeImageCallback(
- EncodedImageCallback* callback) {
- CriticalSectionScoped cs(post_encode_callback_lock_.get());
- post_encode_callback_ = callback;
-}
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.h b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.h
index c5cfeabc26f..9277260af34 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/generic_encoder.h
@@ -17,9 +17,7 @@
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-namespace webrtc
-{
-
+namespace webrtc {
class CriticalSectionWrapper;
namespace media_optimization {
@@ -32,7 +30,7 @@ class MediaOptimization;
class VCMEncodedFrameCallback : public EncodedImageCallback
{
public:
- VCMEncodedFrameCallback();
+ VCMEncodedFrameCallback(EncodedImageCallback* post_encode_callback);
virtual ~VCMEncodedFrameCallback();
/*
@@ -43,10 +41,6 @@ public:
const CodecSpecificInfo* codecSpecificInfo = NULL,
const RTPFragmentationHeader* fragmentationHeader = NULL);
/*
- * Get number of encoded bytes
- */
- uint32_t EncodedBytes();
- /*
* Callback implementation - generic encoder encode complete
*/
int32_t SetTransportCallback(VCMPacketizationCallback* transport);
@@ -56,27 +50,14 @@ public:
void SetMediaOpt (media_optimization::MediaOptimization* mediaOpt);
void SetPayloadType(uint8_t payloadType) { _payloadType = payloadType; };
- void SetCodecType(VideoCodecType codecType) {_codecType = codecType;};
void SetInternalSource(bool internalSource) { _internalSource = internalSource; };
- void RegisterPostEncodeImageCallback(EncodedImageCallback* callback);
-
private:
- /*
- * Map information from info into rtp. If no relevant information is found
- * in info, rtp is set to NULL.
- */
- static void CopyCodecSpecific(const CodecSpecificInfo& info,
- RTPVideoHeader** rtp);
-
VCMPacketizationCallback* _sendCallback;
media_optimization::MediaOptimization* _mediaOpt;
- uint32_t _encodedBytes;
uint8_t _payloadType;
- VideoCodecType _codecType;
bool _internalSource;
- scoped_ptr<CriticalSectionWrapper> post_encode_callback_lock_;
EncodedImageCallback* post_encode_callback_;
#ifdef DEBUG_ENCODER_BIT_STREAM
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.cc
index f11f81b46f2..d8792f21afb 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.cc
@@ -25,7 +25,6 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -77,10 +76,6 @@ int FrameList::RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it,
FrameList::iterator it = begin();
while (!empty()) {
// Throw at least one frame.
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding, -1,
- "Recycling: type=%s, low seqnum=%u",
- it->second->FrameType() == kVideoFrameKey ?
- "key" : "delta", it->second->GetLowSeqNum());
it->second->Reset();
free_frames->push_back(it->second);
erase(it++);
@@ -128,16 +123,10 @@ void FrameList::Reset(UnorderedFrameList* free_frames) {
}
VCMJitterBuffer::VCMJitterBuffer(Clock* clock,
- EventFactory* event_factory,
- int vcm_id,
- int receiver_id,
- bool master)
- : vcm_id_(vcm_id),
- receiver_id_(receiver_id),
- clock_(clock),
+ EventFactory* event_factory)
+ : clock_(clock),
running_(false),
crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- master_(master),
frame_event_(event_factory->CreateEvent()),
packet_event_(event_factory->CreateEvent()),
max_number_of_frames_(kStartNumberOfFrames),
@@ -156,7 +145,7 @@ VCMJitterBuffer::VCMJitterBuffer(Clock* clock,
num_consecutive_old_frames_(0),
num_consecutive_old_packets_(0),
num_discarded_packets_(0),
- jitter_estimate_(vcm_id, receiver_id),
+ jitter_estimate_(),
inter_frame_delay_(clock_->TimeInMilliseconds()),
rtt_ms_(kDefaultRtt),
nack_mode_(kNoNack),
@@ -192,10 +181,7 @@ void VCMJitterBuffer::CopyFrom(const VCMJitterBuffer& rhs) {
if (this != &rhs) {
crit_sect_->Enter();
rhs.crit_sect_->Enter();
- vcm_id_ = rhs.vcm_id_;
- receiver_id_ = rhs.receiver_id_;
running_ = rhs.running_;
- master_ = !rhs.master_;
max_number_of_frames_ = rhs.max_number_of_frames_;
incoming_frame_rate_ = rhs.incoming_frame_rate_;
incoming_frame_count_ = rhs.incoming_frame_count_;
@@ -276,10 +262,6 @@ void VCMJitterBuffer::Start() {
first_packet_since_reset_ = true;
rtt_ms_ = kDefaultRtt;
last_decoded_state_.Reset();
-
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "JB(0x%x): Jitter buffer: start",
- this);
}
void VCMJitterBuffer::Stop() {
@@ -300,9 +282,6 @@ void VCMJitterBuffer::Stop() {
// Make sure we wake up any threads waiting on these events.
frame_event_->Set();
packet_event_->Set();
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "JB(0x%x): Jitter buffer: stop",
- this);
}
bool VCMJitterBuffer::Running() const {
@@ -327,9 +306,6 @@ void VCMJitterBuffer::Flush() {
waiting_for_completion_.latest_packet_time = -1;
first_packet_since_reset_ = true;
missing_sequence_numbers_.clear();
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "JB(0x%x): Jitter buffer: flush",
- this);
}
// Get received key and delta frames
@@ -583,6 +559,8 @@ VCMFrameBufferEnum VCMJitterBuffer::GetFrame(const VCMPacket& packet,
DropPacketsFromNackList(last_decoded_state_.sequence_num());
if (num_consecutive_old_packets_ > kMaxConsecutiveOldPackets) {
+ LOG(LS_WARNING) << num_consecutive_old_packets_ << " consecutive old "
+ "packets received. Flushing the jitter buffer.";
Flush();
return kFlushIndicator;
}
@@ -602,13 +580,13 @@ VCMFrameBufferEnum VCMJitterBuffer::GetFrame(const VCMPacket& packet,
VCMFrameBufferEnum ret = kNoError;
if (!*frame) {
// No free frame! Try to reclaim some...
- LOG_F(LS_INFO) << "Unable to get empty frame; Recycling.";
+ LOG(LS_WARNING) << "Unable to get empty frame; Recycling.";
bool found_key_frame = RecycleFramesUntilKeyFrame();
*frame = GetEmptyFrame();
- if (!*frame)
- return kGeneralError;
- else if (!found_key_frame)
+ assert(*frame);
+ if (!found_key_frame) {
ret = kFlushIndicator;
+ }
}
(*frame)->Reset();
return ret;
@@ -650,6 +628,8 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
// Flush if this happens consistently.
num_consecutive_old_frames_++;
if (num_consecutive_old_frames_ > kMaxConsecutiveOldFrames) {
+ LOG(LS_WARNING) << num_consecutive_old_packets_ << " consecutive old "
+ "frames received. Flushing the jitter buffer.";
Flush();
return kFlushIndicator;
}
@@ -702,8 +682,8 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
if (IsPacketRetransmitted(packet)) {
frame->IncrementNackCount();
}
- if (!UpdateNackList(packet.seqNum)) {
- LOG_F(LS_INFO) << "Requesting key frame due to flushed NACK list.";
+ if (!UpdateNackList(packet.seqNum) &&
+ packet.frameType != kVideoFrameKey) {
buffer_return = kFlushIndicator;
}
latest_received_sequence_number_ = LatestSequenceNumber(
@@ -725,15 +705,6 @@ VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
}
case kCompleteSession: {
if (update_decodable_list) {
- if (master_) {
- // Only trace the primary jitter buffer to make it possible to parse
- // and plot the trace file.
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "JB(0x%x) FB(0x%x): Complete frame added to jitter"
- "buffer, size:%d type %d",
- this, frame, frame->Length(), frame->FrameType());
- }
CountFrame(*frame);
frame->SetCountedFrame(true);
if (continuous) {
@@ -960,8 +931,6 @@ uint16_t* VCMJitterBuffer::GetNackList(uint16_t* nack_list_size,
incomplete_frames_.begin(), incomplete_frames_.end(),
HasNonEmptyState);
}
- if (have_non_empty_frame)
- LOG_F(LS_INFO) << "First frame is not key; Recycling.";
bool found_key_frame = RecycleFramesUntilKeyFrame();
if (!found_key_frame) {
*request_key_frame = have_non_empty_frame;
@@ -977,9 +946,9 @@ uint16_t* VCMJitterBuffer::GetNackList(uint16_t* nack_list_size,
int non_continuous_incomplete_duration =
NonContinuousOrIncompleteDuration();
if (non_continuous_incomplete_duration > 90 * max_incomplete_time_ms_) {
- LOG_F(LS_INFO) << "Too long non-decodable duration: " <<
- non_continuous_incomplete_duration << " > " <<
- 90 * max_incomplete_time_ms_;
+ LOG_F(LS_WARNING) << "Too long non-decodable duration: "
+ << non_continuous_incomplete_duration << " > "
+ << 90 * max_incomplete_time_ms_;
FrameList::reverse_iterator rit = find_if(incomplete_frames_.rbegin(),
incomplete_frames_.rend(), IsKeyFrame);
if (rit == incomplete_frames_.rend()) {
@@ -1038,10 +1007,12 @@ bool VCMJitterBuffer::UpdateNackList(uint16_t sequence_number) {
TRACE_EVENT_INSTANT1("webrtc", "AddNack", "seqnum", i);
}
if (TooLargeNackList() && !HandleTooLargeNackList()) {
+ LOG(LS_WARNING) << "Requesting key frame due to too large NACK list.";
return false;
}
if (MissingTooOldPacket(sequence_number) &&
!HandleTooOldPackets(sequence_number)) {
+ LOG(LS_WARNING) << "Requesting key frame due to missing too old packets";
return false;
}
} else {
@@ -1058,8 +1029,9 @@ bool VCMJitterBuffer::TooLargeNackList() const {
bool VCMJitterBuffer::HandleTooLargeNackList() {
// Recycle frames until the NACK list is small enough. It is likely cheaper to
// request a key frame than to retransmit this many missing packets.
- LOG_F(LS_INFO) << "NACK list has grown too large: " <<
- missing_sequence_numbers_.size() << " > " << max_nack_list_size_;
+ LOG_F(LS_WARNING) << "NACK list has grown too large: "
+ << missing_sequence_numbers_.size() << " > "
+ << max_nack_list_size_;
bool key_frame_found = false;
while (TooLargeNackList()) {
key_frame_found = RecycleFramesUntilKeyFrame();
@@ -1083,8 +1055,9 @@ bool VCMJitterBuffer::HandleTooOldPackets(uint16_t latest_sequence_number) {
bool key_frame_found = false;
const uint16_t age_of_oldest_missing_packet = latest_sequence_number -
*missing_sequence_numbers_.begin();
- LOG_F(LS_INFO) << "NACK list contains too old sequence numbers: " <<
- age_of_oldest_missing_packet << " > " << max_packet_age_to_nack_;
+ LOG_F(LS_WARNING) << "NACK list contains too old sequence numbers: "
+ << age_of_oldest_missing_packet << " > "
+ << max_packet_age_to_nack_;
while (MissingTooOldPacket(latest_sequence_number)) {
key_frame_found = RecycleFramesUntilKeyFrame();
}
@@ -1136,10 +1109,6 @@ bool VCMJitterBuffer::TryToIncreaseJitterBufferSize() {
frame_buffers_[max_number_of_frames_] = new_frame;
free_frames_.push_back(new_frame);
++max_number_of_frames_;
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "JB(0x%x) FB(0x%x): Jitter buffer increased to:%d frames",
- this, new_frame, max_number_of_frames_);
TRACE_COUNTER1("webrtc", "JBMaxFrames", max_number_of_frames_);
return true;
}
@@ -1161,13 +1130,9 @@ bool VCMJitterBuffer::RecycleFramesUntilKeyFrame() {
key_frame_found = key_frame_it != decodable_frames_.end();
}
drop_count_ += dropped_frames;
- if (dropped_frames) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Jitter buffer drop count:%u", drop_count_);
- }
TRACE_EVENT_INSTANT0("webrtc", "JB::RecycleFramesUntilKeyFrame");
if (key_frame_found) {
+ LOG(LS_INFO) << "Found key frame while dropping frames.";
// Reset last decoded state to make sure the next frame decoded is a key
// frame, and start NACKing from here.
last_decoded_state_.Reset();
@@ -1246,19 +1211,6 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMJitterSample& sample,
if (sample.latest_packet_time == -1) {
return;
}
- if (incomplete_frame) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "Received incomplete frame "
- "timestamp %u frame size %u at time %u",
- sample.timestamp, sample.frame_size,
- MaskWord64ToUWord32(sample.latest_packet_time));
- } else {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "Received complete frame "
- "timestamp %u frame size %u at time %u",
- sample.timestamp, sample.frame_size,
- MaskWord64ToUWord32(sample.latest_packet_time));
- }
UpdateJitterEstimate(sample.latest_packet_time, sample.timestamp,
sample.frame_size, incomplete_frame);
}
@@ -1273,23 +1225,6 @@ void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame,
}
// No retransmitted frames should be a part of the jitter
// estimate.
- if (incomplete_frame) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Received incomplete frame timestamp %u frame type %d "
- "frame size %u at time %u, jitter estimate was %u",
- frame.TimeStamp(), frame.FrameType(), frame.Length(),
- MaskWord64ToUWord32(frame.LatestPacketTimeMs()),
- EstimatedJitterMs());
- } else {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_), "Received complete frame "
- "timestamp %u frame type %d frame size %u at time %u, "
- "jitter estimate was %u",
- frame.TimeStamp(), frame.FrameType(), frame.Length(),
- MaskWord64ToUWord32(frame.LatestPacketTimeMs()),
- EstimatedJitterMs());
- }
UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.TimeStamp(),
frame.Length(), incomplete_frame);
}
@@ -1306,12 +1241,6 @@ void VCMJitterBuffer::UpdateJitterEstimate(
return;
}
int64_t frame_delay;
- // Calculate the delay estimate
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Packet received and sent to jitter estimate with: "
- "timestamp=%u wall_clock=%u", timestamp,
- MaskWord64ToUWord32(latest_packet_time_ms));
bool not_reordered = inter_frame_delay_.CalculateDelay(timestamp,
&frame_delay,
latest_packet_time_ms);
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.h b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.h
index 8586f115f88..6ed9cfb85c6 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer.h
@@ -16,6 +16,7 @@
#include <set>
#include <vector>
+#include "webrtc/base/constructormagic.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
@@ -23,7 +24,6 @@
#include "webrtc/modules/video_coding/main/source/inter_frame_delay.h"
#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
#include "webrtc/modules/video_coding/main/source/jitter_estimator.h"
-#include "webrtc/system_wrappers/interface/constructor_magic.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/typedefs.h"
@@ -77,10 +77,7 @@ class FrameList
class VCMJitterBuffer {
public:
VCMJitterBuffer(Clock* clock,
- EventFactory* event_factory,
- int vcm_id,
- int receiver_id,
- bool master);
+ EventFactory* event_factory);
virtual ~VCMJitterBuffer();
// Makes |this| a deep copy of |rhs|.
@@ -274,13 +271,10 @@ class VCMJitterBuffer {
uint16_t EstimatedLowSequenceNumber(const VCMFrameBuffer& frame) const;
- int vcm_id_;
- int receiver_id_;
Clock* clock_;
// If we are running (have started) or not.
bool running_;
CriticalSectionWrapper* crit_sect_;
- bool master_;
// Event to signal when we have a frame ready for decoder.
scoped_ptr<EventWrapper> frame_event_;
// Event to signal when we have received a packet.
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc
index e535a8a4043..0490658b420 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_buffer_unittest.cc
@@ -27,8 +27,8 @@ class TestBasicJitterBuffer : public ::testing::Test {
protected:
virtual void SetUp() {
clock_.reset(new SimulatedClock(0));
- jitter_buffer_.reset(new VCMJitterBuffer(clock_.get(),
- &event_factory_, -1, -1, true));
+ jitter_buffer_.reset(
+ new VCMJitterBuffer(clock_.get(), &event_factory_));
jitter_buffer_->Start();
seq_num_ = 1234;
timestamp_ = 0;
@@ -126,8 +126,7 @@ class TestRunningJitterBuffer : public ::testing::Test {
clock_.reset(new SimulatedClock(0));
max_nack_list_size_ = 150;
oldest_packet_to_nack_ = 250;
- jitter_buffer_ = new VCMJitterBuffer(clock_.get(), &event_factory_, -1, -1,
- true);
+ jitter_buffer_ = new VCMJitterBuffer(clock_.get(), &event_factory_);
stream_generator_ = new StreamGenerator(0, 0, clock_->TimeInMilliseconds());
jitter_buffer_->Start();
jitter_buffer_->SetNackSettings(max_nack_list_size_,
@@ -2034,4 +2033,31 @@ TEST_F(TestJitterBufferNack, NormalOperationWrap2) {
EXPECT_EQ(65535, list[0]);
}
+TEST_F(TestJitterBufferNack, ResetByFutureKeyFrameDoesntError) {
+ stream_generator_->Init(0, 0, clock_->TimeInMilliseconds());
+ InsertFrame(kVideoFrameKey);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ uint16_t nack_list_size = 0;
+ bool extended = false;
+ jitter_buffer_->GetNackList(&nack_list_size, &extended);
+ EXPECT_EQ(0, nack_list_size);
+
+ // Far-into-the-future video frame, could be caused by resetting the encoder
+ // or otherwise restarting. This should not fail when error when the packet is
+ // a keyframe, even if all of the nack list needs to be flushed.
+ stream_generator_->Init(10000, 0, clock_->TimeInMilliseconds());
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ InsertFrame(kVideoFrameKey);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ jitter_buffer_->GetNackList(&nack_list_size, &extended);
+ EXPECT_EQ(0, nack_list_size);
+
+ // Stream should be decodable from this point.
+ clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
+ InsertFrame(kVideoFrameDelta);
+ EXPECT_TRUE(DecodeCompleteFrame());
+ jitter_buffer_->GetNackList(&nack_list_size, &extended);
+ EXPECT_EQ(0, nack_list_size);
+}
+
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_estimator.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_estimator.cc
index deb036300ee..71c54a00cd2 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_estimator.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/jitter_estimator.cc
@@ -11,7 +11,6 @@
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/source/jitter_estimator.h"
#include "webrtc/modules/video_coding/main/source/rtt_filter.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include <assert.h>
#include <math.h>
@@ -20,20 +19,20 @@
namespace webrtc {
-VCMJitterEstimator::VCMJitterEstimator(int32_t vcmId, int32_t receiverId) :
-_vcmId(vcmId),
-_receiverId(receiverId),
-_phi(0.97),
-_psi(0.9999),
-_alphaCountMax(400),
-_thetaLow(0.000001),
-_nackLimit(3),
-_numStdDevDelayOutlier(15),
-_numStdDevFrameSizeOutlier(3),
-_noiseStdDevs(2.33), // ~Less than 1% chance
- // (look up in normal distribution table)...
-_noiseStdDevOffset(30.0), // ...of getting 30 ms freezes
-_rttFilter(vcmId, receiverId) {
+VCMJitterEstimator::VCMJitterEstimator(int32_t vcmId, int32_t receiverId)
+ : _vcmId(vcmId),
+ _receiverId(receiverId),
+ _phi(0.97),
+ _psi(0.9999),
+ _alphaCountMax(400),
+ _thetaLow(0.000001),
+ _nackLimit(3),
+ _numStdDevDelayOutlier(15),
+ _numStdDevFrameSizeOutlier(3),
+ _noiseStdDevs(2.33), // ~Less than 1% chance
+ // (look up in normal distribution table)...
+ _noiseStdDevOffset(30.0), // ...of getting 30 ms freezes
+ _rttFilter() {
Reset();
}
@@ -108,10 +107,6 @@ void
VCMJitterEstimator::UpdateEstimate(int64_t frameDelayMS, uint32_t frameSizeBytes,
bool incompleteFrame /* = false */)
{
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(_vcmId, _receiverId),
- "Jitter estimate updated with: frameSize=%d frameDelayMS=%d",
- frameSizeBytes, frameDelayMS);
if (frameSizeBytes == 0)
{
return;
@@ -162,7 +157,7 @@ VCMJitterEstimator::UpdateEstimate(int64_t frameDelayMS, uint32_t frameSizeBytes
// deviation is probably due to an incorrect line slope.
double deviation = DeviationFromExpectedDelay(frameDelayMS, deltaFS);
- if (abs(deviation) < _numStdDevDelayOutlier * sqrt(_varNoise) ||
+ if (fabs(deviation) < _numStdDevDelayOutlier * sqrt(_varNoise) ||
frameSizeBytes > _avgFrameSize + _numStdDevFrameSizeOutlier * sqrt(_varFrameSize))
{
// Update the variance of the deviation from the
@@ -195,16 +190,6 @@ VCMJitterEstimator::UpdateEstimate(int64_t frameDelayMS, uint32_t frameSizeBytes
{
_startupCount++;
}
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Framesize statistics: max=%f average=%f", _maxFrameSize, _avgFrameSize);
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "The estimated slope is: theta=(%f, %f)", _theta[0], _theta[1]);
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Random jitter: mean=%f variance=%f", _avgNoise, _varNoise);
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Current jitter estimate: %f", _filterJitterEstimate);
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Current max RTT: %u", _rttFilter.RttMs());
}
// Updates the nack/packet ratio
@@ -257,7 +242,7 @@ VCMJitterEstimator::KalmanEstimateChannel(int64_t frameDelayMS,
{
return;
}
- double sigma = (300.0 * exp(-abs(static_cast<double>(deltaFSBytes)) /
+ double sigma = (300.0 * exp(-fabs(static_cast<double>(deltaFSBytes)) /
(1e0 * _maxFrameSize)) + 1) * sqrt(_varNoise);
if (sigma < 1.0)
{
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.cc
index 27fa6819338..4dc72253be3 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.cc
@@ -14,13 +14,68 @@
#include "webrtc/modules/video_coding/main/source/qm_select.h"
#include "webrtc/modules/video_coding/utility/include/frame_dropper.h"
#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
namespace media_optimization {
+namespace {
+void UpdateProtectionCallback(
+ VCMProtectionMethod* selected_method,
+ uint32_t* video_rate_bps,
+ uint32_t* nack_overhead_rate_bps,
+ uint32_t* fec_overhead_rate_bps,
+ VCMProtectionCallback* video_protection_callback) {
+ FecProtectionParams delta_fec_params;
+ FecProtectionParams key_fec_params;
+ // Get the FEC code rate for Key frames (set to 0 when NA).
+ key_fec_params.fec_rate = selected_method->RequiredProtectionFactorK();
+
+ // Get the FEC code rate for Delta frames (set to 0 when NA).
+ delta_fec_params.fec_rate = selected_method->RequiredProtectionFactorD();
+
+ // Get the FEC-UEP protection status for Key frames: UEP on/off.
+ key_fec_params.use_uep_protection = selected_method->RequiredUepProtectionK();
+
+ // Get the FEC-UEP protection status for Delta frames: UEP on/off.
+ delta_fec_params.use_uep_protection =
+ selected_method->RequiredUepProtectionD();
-MediaOptimization::MediaOptimization(int32_t id, Clock* clock)
- : id_(id),
- clock_(clock),
+ // The RTP module currently requires the same |max_fec_frames| for both
+ // key and delta frames.
+ delta_fec_params.max_fec_frames = selected_method->MaxFramesFec();
+ key_fec_params.max_fec_frames = selected_method->MaxFramesFec();
+
+ // Set the FEC packet mask type. |kFecMaskBursty| is more effective for
+ // consecutive losses and little/no packet re-ordering. As we currently
+ // do not have feedback data on the degree of correlated losses and packet
+ // re-ordering, we keep default setting to |kFecMaskRandom| for now.
+ delta_fec_params.fec_mask_type = kFecMaskRandom;
+ key_fec_params.fec_mask_type = kFecMaskRandom;
+
+ // TODO(Marco): Pass FEC protection values per layer.
+ video_protection_callback->ProtectionRequest(&delta_fec_params,
+ &key_fec_params,
+ video_rate_bps,
+ nack_overhead_rate_bps,
+ fec_overhead_rate_bps);
+}
+} // namespace
+
+struct MediaOptimization::EncodedFrameSample {
+ EncodedFrameSample(int size_bytes,
+ uint32_t timestamp,
+ int64_t time_complete_ms)
+ : size_bytes(size_bytes),
+ timestamp(timestamp),
+ time_complete_ms(time_complete_ms) {}
+
+ uint32_t size_bytes;
+ uint32_t timestamp;
+ int64_t time_complete_ms;
+};
+
+MediaOptimization::MediaOptimization(Clock* clock)
+ : clock_(clock),
max_bit_rate_(0),
send_codec_type_(kVideoCodecUnknown),
codec_width_(0),
@@ -35,8 +90,6 @@ MediaOptimization::MediaOptimization(int32_t id, Clock* clock)
target_bit_rate_(0),
incoming_frame_rate_(0),
enable_qm_(false),
- video_protection_callback_(NULL),
- video_qmsettings_callback_(NULL),
encoded_frame_samples_(),
avg_sent_bit_rate_bps_(0),
avg_sent_framerate_(0),
@@ -59,7 +112,8 @@ MediaOptimization::~MediaOptimization(void) {
loss_prot_logic_->Release();
}
-int32_t MediaOptimization::Reset() {
+void MediaOptimization::Reset() {
+ SetEncodingData(kVideoCodecUnknown, 0, 0, 0, 0, 0, 0, max_payload_size_);
memset(incoming_frame_times_, -1, sizeof(incoming_frame_times_));
incoming_frame_rate_ = 0.0;
frame_dropper_->Reset();
@@ -81,12 +135,52 @@ int32_t MediaOptimization::Reset() {
encoded_frame_samples_.clear();
avg_sent_bit_rate_bps_ = 0;
num_layers_ = 1;
- return VCM_OK;
}
-uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate,
- uint8_t fraction_lost,
- uint32_t round_trip_time_ms) {
+void MediaOptimization::SetEncodingData(VideoCodecType send_codec_type,
+ int32_t max_bit_rate,
+ uint32_t frame_rate,
+ uint32_t target_bitrate,
+ uint16_t width,
+ uint16_t height,
+ int num_layers,
+ int32_t mtu) {
+ // Everything codec specific should be reset here since this means the codec
+ // has changed. If native dimension values have changed, then either user
+ // initiated change, or QM initiated change. Will be able to determine only
+ // after the processing of the first frame.
+ last_change_time_ = clock_->TimeInMilliseconds();
+ content_->Reset();
+ content_->UpdateFrameRate(frame_rate);
+
+ max_bit_rate_ = max_bit_rate;
+ send_codec_type_ = send_codec_type;
+ target_bit_rate_ = target_bitrate;
+ float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f;
+ loss_prot_logic_->UpdateBitRate(target_bitrate_kbps);
+ loss_prot_logic_->UpdateFrameRate(static_cast<float>(frame_rate));
+ loss_prot_logic_->UpdateFrameSize(width, height);
+ loss_prot_logic_->UpdateNumLayers(num_layers);
+ frame_dropper_->Reset();
+ frame_dropper_->SetRates(target_bitrate_kbps, static_cast<float>(frame_rate));
+ user_frame_rate_ = static_cast<float>(frame_rate);
+ codec_width_ = width;
+ codec_height_ = height;
+ num_layers_ = (num_layers <= 1) ? 1 : num_layers; // Can also be zero.
+ max_payload_size_ = mtu;
+ qm_resolution_->Initialize(target_bitrate_kbps,
+ user_frame_rate_,
+ codec_width_,
+ codec_height_,
+ num_layers_);
+}
+
+uint32_t MediaOptimization::SetTargetRates(
+ uint32_t target_bitrate,
+ uint8_t fraction_lost,
+ uint32_t round_trip_time_ms,
+ VCMProtectionCallback* protection_callback,
+ VCMQMSettingsCallback* qmsettings_callback) {
// TODO(holmer): Consider putting this threshold only on the video bitrate,
// and not on protection.
if (max_bit_rate_ > 0 &&
@@ -145,10 +239,13 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate,
// Get the bit cost of protection method, based on the amount of
// overhead data actually transmitted (including headers) the last
// second.
- UpdateProtectionCallback(selected_method,
- &sent_video_rate_bps,
- &sent_nack_rate_bps,
- &sent_fec_rate_bps);
+ if (protection_callback) {
+ UpdateProtectionCallback(selected_method,
+ &sent_video_rate_bps,
+ &sent_nack_rate_bps,
+ &sent_fec_rate_bps,
+ protection_callback);
+ }
uint32_t sent_total_rate_bps =
sent_video_rate_bps + sent_nack_rate_bps + sent_fec_rate_bps;
// Estimate the overhead costs of the next second as staying the same
@@ -178,7 +275,7 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate,
static_cast<float>(target_bit_rate_) / 1000.0f;
frame_dropper_->SetRates(target_video_bitrate_kbps, incoming_frame_rate_);
- if (enable_qm_) {
+ if (enable_qm_ && qmsettings_callback) {
// Update QM with rates.
qm_resolution_->UpdateRates(target_video_bitrate_kbps,
sent_video_rate_kbps,
@@ -187,7 +284,7 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate,
// Check for QM selection.
bool select_qm = CheckStatusForQMchange();
if (select_qm) {
- SelectQuality();
+ SelectQuality(qmsettings_callback);
}
// Reset the short-term averaged content data.
content_->ResetShortTermAvgData();
@@ -198,44 +295,6 @@ uint32_t MediaOptimization::SetTargetRates(uint32_t target_bitrate,
return target_bit_rate_;
}
-int32_t MediaOptimization::SetEncodingData(VideoCodecType send_codec_type,
- int32_t max_bit_rate,
- uint32_t frame_rate,
- uint32_t target_bitrate,
- uint16_t width,
- uint16_t height,
- int num_layers) {
- // Everything codec specific should be reset here since this means the codec
- // has changed. If native dimension values have changed, then either user
- // initiated change, or QM initiated change. Will be able to determine only
- // after the processing of the first frame.
- last_change_time_ = clock_->TimeInMilliseconds();
- content_->Reset();
- content_->UpdateFrameRate(frame_rate);
-
- max_bit_rate_ = max_bit_rate;
- send_codec_type_ = send_codec_type;
- target_bit_rate_ = target_bitrate;
- float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f;
- loss_prot_logic_->UpdateBitRate(target_bitrate_kbps);
- loss_prot_logic_->UpdateFrameRate(static_cast<float>(frame_rate));
- loss_prot_logic_->UpdateFrameSize(width, height);
- loss_prot_logic_->UpdateNumLayers(num_layers);
- frame_dropper_->Reset();
- frame_dropper_->SetRates(target_bitrate_kbps, static_cast<float>(frame_rate));
- user_frame_rate_ = static_cast<float>(frame_rate);
- codec_width_ = width;
- codec_height_ = height;
- num_layers_ = (num_layers <= 1) ? 1 : num_layers; // Can also be zero.
- int32_t ret = VCM_OK;
- ret = qm_resolution_->Initialize(target_bitrate_kbps,
- user_frame_rate_,
- codec_width_,
- codec_height_,
- num_layers_);
- return ret;
-}
-
void MediaOptimization::EnableProtectionMethod(bool enable,
VCMProtectionMethodEnum method) {
bool updated = false;
@@ -249,11 +308,6 @@ void MediaOptimization::EnableProtectionMethod(bool enable,
}
}
-bool MediaOptimization::IsProtectionMethodEnabled(
- VCMProtectionMethodEnum method) {
- return (loss_prot_logic_->SelectedType() == method);
-}
-
uint32_t MediaOptimization::InputFrameRate() {
ProcessIncomingFrameRate(clock_->TimeInMilliseconds());
return uint32_t(incoming_frame_rate_ + 0.5f);
@@ -272,6 +326,13 @@ uint32_t MediaOptimization::SentBitRate() {
return avg_sent_bit_rate_bps_;
}
+VCMFrameCount MediaOptimization::SentFrameCount() {
+ VCMFrameCount count;
+ count.numDeltaFrames = delta_frame_cnt_;
+ count.numKeyFrames = key_frame_cnt_;
+ return count;
+}
+
int32_t MediaOptimization::UpdateWithEncodedData(int encoded_length,
uint32_t timestamp,
FrameType encoded_frame_type) {
@@ -325,29 +386,14 @@ int32_t MediaOptimization::UpdateWithEncodedData(int encoded_length,
return VCM_OK;
}
-int32_t MediaOptimization::RegisterProtectionCallback(
- VCMProtectionCallback* protection_callback) {
- video_protection_callback_ = protection_callback;
- return VCM_OK;
-}
-
-int32_t MediaOptimization::RegisterVideoQMCallback(
- VCMQMSettingsCallback* video_qmsettings) {
- video_qmsettings_callback_ = video_qmsettings;
- // Callback setting controls QM.
- if (video_qmsettings_callback_ != NULL) {
- enable_qm_ = true;
- } else {
- enable_qm_ = false;
- }
- return VCM_OK;
-}
+void MediaOptimization::EnableQM(bool enable) { enable_qm_ = enable; }
void MediaOptimization::EnableFrameDropper(bool enable) {
frame_dropper_->Enable(enable);
}
bool MediaOptimization::DropFrame() {
+ UpdateIncomingFrameRate();
// Leak appropriate number of bytes.
frame_dropper_->Leak((uint32_t)(InputFrameRate() + 0.5f));
if (video_suspended_) {
@@ -356,12 +402,6 @@ bool MediaOptimization::DropFrame() {
return frame_dropper_->DropFrame();
}
-int32_t MediaOptimization::SentFrameCount(VCMFrameCount* frame_count) const {
- frame_count->numDeltaFrames = delta_frame_cnt_;
- frame_count->numKeyFrames = key_frame_cnt_;
- return VCM_OK;
-}
-
void MediaOptimization::UpdateIncomingFrameRate() {
int64_t now = clock_->TimeInMilliseconds();
if (incoming_frame_times_[0] == 0) {
@@ -388,7 +428,8 @@ void MediaOptimization::UpdateContentData(
}
}
-int32_t MediaOptimization::SelectQuality() {
+int32_t MediaOptimization::SelectQuality(
+ VCMQMSettingsCallback* video_qmsettings_callback) {
// Reset quantities for QM select.
qm_resolution_->ResetQM();
@@ -403,7 +444,7 @@ int32_t MediaOptimization::SelectQuality() {
}
// Check for updates to spatial/temporal modes.
- QMUpdate(qm);
+ QMUpdate(qm, video_qmsettings_callback);
// Reset all the rate and related frame counters quantities.
qm_resolution_->ResetRates();
@@ -426,50 +467,7 @@ void MediaOptimization::SuspendBelowMinBitrate(int threshold_bps,
video_suspended_ = false;
}
-// Private methods below this line.
-
-int MediaOptimization::UpdateProtectionCallback(
- VCMProtectionMethod* selected_method,
- uint32_t* video_rate_bps,
- uint32_t* nack_overhead_rate_bps,
- uint32_t* fec_overhead_rate_bps) {
- if (!video_protection_callback_) {
- return VCM_OK;
- }
- FecProtectionParams delta_fec_params;
- FecProtectionParams key_fec_params;
- // Get the FEC code rate for Key frames (set to 0 when NA).
- key_fec_params.fec_rate = selected_method->RequiredProtectionFactorK();
-
- // Get the FEC code rate for Delta frames (set to 0 when NA).
- delta_fec_params.fec_rate = selected_method->RequiredProtectionFactorD();
-
- // Get the FEC-UEP protection status for Key frames: UEP on/off.
- key_fec_params.use_uep_protection = selected_method->RequiredUepProtectionK();
-
- // Get the FEC-UEP protection status for Delta frames: UEP on/off.
- delta_fec_params.use_uep_protection =
- selected_method->RequiredUepProtectionD();
-
- // The RTP module currently requires the same |max_fec_frames| for both
- // key and delta frames.
- delta_fec_params.max_fec_frames = selected_method->MaxFramesFec();
- key_fec_params.max_fec_frames = selected_method->MaxFramesFec();
-
- // Set the FEC packet mask type. |kFecMaskBursty| is more effective for
- // consecutive losses and little/no packet re-ordering. As we currently
- // do not have feedback data on the degree of correlated losses and packet
- // re-ordering, we keep default setting to |kFecMaskRandom| for now.
- delta_fec_params.fec_mask_type = kFecMaskRandom;
- key_fec_params.fec_mask_type = kFecMaskRandom;
-
- // TODO(Marco): Pass FEC protection values per layer.
- return video_protection_callback_->ProtectionRequest(&delta_fec_params,
- &key_fec_params,
- video_rate_bps,
- nack_overhead_rate_bps,
- fec_overhead_rate_bps);
-}
+bool MediaOptimization::IsVideoSuspended() const { return video_suspended_; }
void MediaOptimization::PurgeOldFrameSamples(int64_t now_ms) {
while (!encoded_frame_samples_.empty()) {
@@ -518,7 +516,9 @@ void MediaOptimization::UpdateSentFramerate() {
}
}
-bool MediaOptimization::QMUpdate(VCMResolutionScale* qm) {
+bool MediaOptimization::QMUpdate(
+ VCMResolutionScale* qm,
+ VCMQMSettingsCallback* video_qmsettings_callback) {
// Check for no change.
if (!qm->change_resolution_spatial && !qm->change_resolution_temporal) {
return false;
@@ -537,13 +537,9 @@ bool MediaOptimization::QMUpdate(VCMResolutionScale* qm) {
codec_height_ = qm->codec_height;
}
- WEBRTC_TRACE(webrtc::kTraceDebug,
- webrtc::kTraceVideoCoding,
- id_,
- "Resolution change from QM select: W = %d, H = %d, FR = %f",
- qm->codec_width,
- qm->codec_height,
- qm->frame_rate);
+ LOG(LS_INFO) << "Media optimizer requests the video resolution to be changed "
+ "to " << qm->codec_width << "x" << qm->codec_height << "@"
+ << qm->frame_rate;
// Update VPM with new target frame rate and frame size.
// Note: use |qm->frame_rate| instead of |_incoming_frame_rate| for updating
@@ -551,7 +547,7 @@ bool MediaOptimization::QMUpdate(VCMResolutionScale* qm) {
// will vary/fluctuate, and since we don't want to change the state of the
// VPM frame dropper, unless a temporal action was selected, we use the
// quantity |qm->frame_rate| for updating.
- video_qmsettings_callback_->SetVideoQMSettings(
+ video_qmsettings_callback->SetVideoQMSettings(
qm->frame_rate, codec_width_, codec_height_);
content_->UpdateFrameRate(qm->frame_rate);
qm_resolution_->UpdateCodecParameters(
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.h b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.h
index cde28d23288..35a49712504 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization.h
@@ -18,7 +18,6 @@
#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
#include "webrtc/modules/video_coding/main/source/qm_select.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@@ -29,33 +28,25 @@ class VCMContentMetricsProcessing;
namespace media_optimization {
-enum {
- kBitrateMaxFrameSamples = 60
-};
-enum {
- kBitrateAverageWinMs = 1000
-};
-
-struct EncodedFrameSample {
- EncodedFrameSample(int size_bytes,
- uint32_t timestamp,
- int64_t time_complete_ms)
- : size_bytes(size_bytes),
- timestamp(timestamp),
- time_complete_ms(time_complete_ms) {}
-
- uint32_t size_bytes;
- uint32_t timestamp;
- int64_t time_complete_ms;
-};
-
+// TODO(andresp): Make thread safe.
class MediaOptimization {
public:
- MediaOptimization(int32_t id, Clock* clock);
- ~MediaOptimization(void);
+ explicit MediaOptimization(Clock* clock);
+ ~MediaOptimization();
+
+ // TODO(andresp): Can Reset and SetEncodingData be done at construction time
+ // only?
+ void Reset();
- // Resets the Media Optimization module.
- int32_t Reset();
+ // Informs media optimization of initial encoding state.
+ void SetEncodingData(VideoCodecType send_codec_type,
+ int32_t max_bit_rate,
+ uint32_t frame_rate,
+ uint32_t bit_rate,
+ uint16_t width,
+ uint16_t height,
+ int num_temporal_layers,
+ int32_t mtu);
// Sets target rates for the encoder given the channel parameters.
// Inputs: target bitrate - the encoder target bitrate in bits/s.
@@ -63,95 +54,64 @@ class MediaOptimization {
// round_trip_time_ms - round trip time in milliseconds.
// min_bit_rate - the bit rate of the end-point with lowest rate.
// max_bit_rate - the bit rate of the end-point with highest rate.
+ // TODO(andresp): Find if the callbacks can be triggered only after releasing
+ // an internal critical section.
uint32_t SetTargetRates(uint32_t target_bitrate,
uint8_t fraction_lost,
- uint32_t round_trip_time_ms);
+ uint32_t round_trip_time_ms,
+ VCMProtectionCallback* protection_callback,
+ VCMQMSettingsCallback* qmsettings_callback);
- // Informs media optimization of initial encoding state.
- int32_t SetEncodingData(VideoCodecType send_codec_type,
- int32_t max_bit_rate,
- uint32_t frame_rate,
- uint32_t bit_rate,
- uint16_t width,
- uint16_t height,
- int num_temporal_layers);
-
- // Enables protection method.
void EnableProtectionMethod(bool enable, VCMProtectionMethodEnum method);
+ void EnableQM(bool enable);
+ void EnableFrameDropper(bool enable);
- // Returns weather or not protection method is enabled.
- bool IsProtectionMethodEnabled(VCMProtectionMethodEnum method);
-
- // Returns the actual input frame rate.
- uint32_t InputFrameRate();
+ // Lets the sender suspend video when the rate drops below
+ // |threshold_bps|, and turns back on when the rate goes back up above
+ // |threshold_bps| + |window_bps|.
+ void SuspendBelowMinBitrate(int threshold_bps, int window_bps);
+ bool IsVideoSuspended() const;
- // Returns the actual sent frame rate.
- uint32_t SentFrameRate();
+ bool DropFrame();
- // Returns the actual sent bit rate.
- uint32_t SentBitRate();
+ void UpdateContentData(const VideoContentMetrics* content_metrics);
// Informs Media Optimization of encoding output: Length and frame type.
int32_t UpdateWithEncodedData(int encoded_length,
uint32_t timestamp,
FrameType encoded_frame_type);
- // Registers a protection callback to be used to inform the user about the
- // protection methods used.
- int32_t RegisterProtectionCallback(
- VCMProtectionCallback* protection_callback);
-
- // Registers a quality settings callback to be used to inform VPM/user.
- int32_t RegisterVideoQMCallback(VCMQMSettingsCallback* video_qmsettings);
-
- void EnableFrameDropper(bool enable);
-
- bool DropFrame();
-
- // Returns the number of key/delta frames encoded.
- int32_t SentFrameCount(VCMFrameCount* frame_count) const;
-
- // Updates incoming frame rate value.
- void UpdateIncomingFrameRate();
-
- // Update content metric data.
- void UpdateContentData(const VideoContentMetrics* content_metrics);
-
- // Computes new Quality Mode.
- int32_t SelectQuality();
-
- // Lets the sender suspend video when the rate drops below
- // |threshold_bps|, and turns back on when the rate goes back up above
- // |threshold_bps| + |window_bps|.
- void SuspendBelowMinBitrate(int threshold_bps, int window_bps);
-
- // Accessors and mutators.
- int32_t max_bit_rate() const { return max_bit_rate_; }
- void set_max_payload_size(int32_t mtu) { max_payload_size_ = mtu; }
- bool video_suspended() const { return video_suspended_; }
+ uint32_t InputFrameRate();
+ uint32_t SentFrameRate();
+ uint32_t SentBitRate();
+ VCMFrameCount SentFrameCount();
private:
- typedef std::list<EncodedFrameSample> FrameSampleList;
enum {
kFrameCountHistorySize = 90
};
enum {
kFrameHistoryWinMs = 2000
};
+ enum {
+ kBitrateAverageWinMs = 1000
+ };
- // Updates protection callback with protection settings.
- int UpdateProtectionCallback(VCMProtectionMethod* selected_method,
- uint32_t* total_video_rate_bps,
- uint32_t* nack_overhead_rate_bps,
- uint32_t* fec_overhead_rate_bps);
+ struct EncodedFrameSample;
+ typedef std::list<EncodedFrameSample> FrameSampleList;
+ void UpdateIncomingFrameRate();
void PurgeOldFrameSamples(int64_t now_ms);
void UpdateSentBitrate(int64_t now_ms);
void UpdateSentFramerate();
+ // Computes new Quality Mode.
+ int32_t SelectQuality(VCMQMSettingsCallback* qmsettings_callback);
+
// Verifies if QM settings differ from default, i.e. if an update is required.
// Computes actual values, as will be sent to the encoder.
- bool QMUpdate(VCMResolutionScale* qm);
+ bool QMUpdate(VCMResolutionScale* qm,
+ VCMQMSettingsCallback* qmsettings_callback);
// Checks if we should make a QM change. Return true if yes, false otherwise.
bool CheckStatusForQMchange();
@@ -163,7 +123,6 @@ class MediaOptimization {
// the state of |video_suspended_| accordingly.
void CheckSuspendConditions();
- int32_t id_;
Clock* clock_;
int32_t max_bit_rate_;
VideoCodecType send_codec_type_;
@@ -180,8 +139,6 @@ class MediaOptimization {
float incoming_frame_rate_;
int64_t incoming_frame_times_[kFrameCountHistorySize];
bool enable_qm_;
- VCMProtectionCallback* video_protection_callback_;
- VCMQMSettingsCallback* video_qmsettings_callback_;
std::list<EncodedFrameSample> encoded_frame_samples_;
uint32_t avg_sent_bit_rate_bps_;
uint32_t avg_sent_framerate_;
@@ -196,8 +153,7 @@ class MediaOptimization {
bool video_suspended_;
int suspension_threshold_bps_;
int suspension_window_bps_;
-}; // End of MediaOptimization class declaration.
-
+};
} // namespace media_optimization
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc
index 1425dad837b..bacfdc604ca 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/media_optimization_unittest.cc
@@ -18,9 +18,6 @@ namespace media_optimization {
class TestMediaOptimization : public ::testing::Test {
protected:
enum {
- kId = 4711 // Id number for the MediaOptimization class.
- };
- enum {
kSampleRate = 90000 // RTP timestamps per second.
};
@@ -28,14 +25,13 @@ class TestMediaOptimization : public ::testing::Test {
// a special case (e.g. frame rate in media optimization).
TestMediaOptimization()
: clock_(1000),
- media_opt_(kId, &clock_),
+ media_opt_(&clock_),
frame_time_ms_(33),
next_timestamp_(0) {}
// This method mimics what happens in VideoSender::AddVideoFrame.
void AddFrameAndAdvanceTime(int bitrate_bps, bool expect_frame_drop) {
ASSERT_GE(bitrate_bps, 0);
- media_opt_.UpdateIncomingFrameRate();
bool frame_dropped = media_opt_.DropFrame();
EXPECT_EQ(expect_frame_drop, frame_dropped);
if (!frame_dropped) {
@@ -63,12 +59,14 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
media_opt_.SuspendBelowMinBitrate(kThresholdBps, kWindowBps);
// The video should not be suspended from the start.
- EXPECT_FALSE(media_opt_.video_suspended());
+ EXPECT_FALSE(media_opt_.IsVideoSuspended());
int target_bitrate_kbps = 100;
media_opt_.SetTargetRates(target_bitrate_kbps * 1000,
0, // Lossrate.
- 100); // RTT in ms.
+ 100,
+ NULL,
+ NULL); // RTT in ms.
media_opt_.EnableFrameDropper(true);
for (int time = 0; time < 2000; time += frame_time_ms_) {
ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, false));
@@ -77,11 +75,13 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
// Set the target rate below the limit for muting.
media_opt_.SetTargetRates(kThresholdBps - 1000,
0, // Lossrate.
- 100); // RTT in ms.
+ 100,
+ NULL,
+ NULL); // RTT in ms.
// Expect the muter to engage immediately and stay muted.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
- EXPECT_TRUE(media_opt_.video_suspended());
+ EXPECT_TRUE(media_opt_.IsVideoSuspended());
ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, true));
}
@@ -89,22 +89,26 @@ TEST_F(TestMediaOptimization, VerifyMuting) {
// limit + window.
media_opt_.SetTargetRates(kThresholdBps + 1000,
0, // Lossrate.
- 100); // RTT in ms.
- // Expect the muter to stay muted.
+ 100,
+ NULL,
+ NULL); // RTT in ms.
+ // Expect the muter to stay muted.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
- EXPECT_TRUE(media_opt_.video_suspended());
+ EXPECT_TRUE(media_opt_.IsVideoSuspended());
ASSERT_NO_FATAL_FAILURE(AddFrameAndAdvanceTime(target_bitrate_kbps, true));
}
// Set the target above limit + window.
media_opt_.SetTargetRates(kThresholdBps + kWindowBps + 1000,
0, // Lossrate.
- 100); // RTT in ms.
+ 100,
+ NULL,
+ NULL); // RTT in ms.
// Expect the muter to disengage immediately.
// Test during 2 seconds.
for (int time = 0; time < 2000; time += frame_time_ms_) {
- EXPECT_FALSE(media_opt_.video_suspended());
+ EXPECT_FALSE(media_opt_.IsVideoSuspended());
ASSERT_NO_FATAL_FAILURE(
AddFrameAndAdvanceTime((kThresholdBps + kWindowBps) / 1000, false));
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/packet.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/packet.cc
index 61ef2ee859a..c1f1a048e8f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/packet.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/packet.cc
@@ -19,6 +19,7 @@ VCMPacket::VCMPacket()
:
payloadType(0),
timestamp(0),
+ ntp_time_ms_(0),
seqNum(0),
dataPtr(NULL),
sizeBytes(0),
@@ -38,6 +39,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr,
const WebRtcRTPHeader& rtpHeader) :
payloadType(rtpHeader.header.payloadType),
timestamp(rtpHeader.header.timestamp),
+ ntp_time_ms_(rtpHeader.ntp_time_ms),
seqNum(rtpHeader.header.sequenceNumber),
dataPtr(ptr),
sizeBytes(size),
@@ -58,6 +60,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr,
VCMPacket::VCMPacket(const uint8_t* ptr, uint32_t size, uint16_t seq, uint32_t ts, bool mBit) :
payloadType(0),
timestamp(ts),
+ ntp_time_ms_(0),
seqNum(seq),
dataPtr(ptr),
sizeBytes(size),
@@ -76,6 +79,7 @@ VCMPacket::VCMPacket(const uint8_t* ptr, uint32_t size, uint16_t seq, uint32_t t
void VCMPacket::Reset() {
payloadType = 0;
timestamp = 0;
+ ntp_time_ms_ = 0;
seqNum = 0;
dataPtr = NULL;
sizeBytes = 0;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/packet.h b/chromium/third_party/webrtc/modules/video_coding/main/source/packet.h
index e9a81bb1fd2..242d3a43142 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/packet.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/packet.h
@@ -33,6 +33,8 @@ public:
uint8_t payloadType;
uint32_t timestamp;
+ // NTP time of the capture time in local timebase in milliseconds.
+ int64_t ntp_time_ms_;
uint16_t seqNum;
const uint8_t* dataPtr;
uint32_t sizeBytes;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.cc
index ae13ddd4211..e179423a77a 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.cc
@@ -12,11 +12,13 @@
#include <assert.h>
+#include <cstdlib>
+
#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/source/media_opt_util.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -26,15 +28,11 @@ enum { kMaxReceiverDelayMs = 10000 };
VCMReceiver::VCMReceiver(VCMTiming* timing,
Clock* clock,
EventFactory* event_factory,
- int32_t vcm_id,
- int32_t receiver_id,
bool master)
: crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vcm_id_(vcm_id),
clock_(clock),
- receiver_id_(receiver_id),
master_(master),
- jitter_buffer_(clock_, event_factory, vcm_id, receiver_id, master),
+ jitter_buffer_(clock_, event_factory),
timing_(timing),
render_wait_event_(event_factory->CreateEvent()),
state_(kPassive),
@@ -76,13 +74,6 @@ void VCMReceiver::UpdateRtt(uint32_t rtt) {
int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
uint16_t frame_width,
uint16_t frame_height) {
- if (packet.frameType == kVideoFrameKey) {
- WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Inserting key frame packet seqnum=%u, timestamp=%u",
- packet.seqNum, packet.timestamp);
- }
-
// Insert the packet into the jitter buffer. The packet can either be empty or
// contain media at this point.
bool retransmitted = false;
@@ -93,10 +84,6 @@ int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
} else if (ret == kFlushIndicator) {
return VCM_FLUSH_INDICATOR;
} else if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Error inserting packet seqnum=%u, timestamp=%u",
- packet.seqNum, packet.timestamp);
return VCM_JITTER_BUFFER_ERROR;
}
if (ret == kCompleteSession && !retransmitted) {
@@ -105,15 +92,6 @@ int32_t VCMReceiver::InsertPacket(const VCMPacket& packet,
// delay within the jitter estimate.
timing_->IncomingTimestamp(packet.timestamp, clock_->TimeInMilliseconds());
}
- if (master_) {
- // Only trace the primary receiver to make it possible to parse and plot
- // the trace file.
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "Packet seqnum=%u timestamp=%u inserted at %u",
- packet.seqNum, packet.timestamp,
- MaskWord64ToUWord32(clock_->TimeInMilliseconds()));
- }
return VCM_OK;
}
@@ -156,20 +134,17 @@ VCMEncodedFrame* VCMReceiver::FrameForDecoding(
// Assume that render timing errors are due to changes in the video stream.
if (next_render_time_ms < 0) {
timing_error = true;
- } else if (abs(next_render_time_ms - now_ms) > max_video_delay_ms_) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "This frame is out of our delay bounds, resetting jitter "
- "buffer: %d > %d",
- static_cast<int>(abs(next_render_time_ms - now_ms)),
- max_video_delay_ms_);
+ } else if (std::abs(next_render_time_ms - now_ms) > max_video_delay_ms_) {
+ int frame_delay = static_cast<int>(std::abs(next_render_time_ms - now_ms));
+ LOG(LS_WARNING) << "A frame about to be decoded is out of the configured "
+ << "delay bounds (" << frame_delay << " > "
+ << max_video_delay_ms_
+ << "). Resetting the video jitter buffer.";
timing_error = true;
} else if (static_cast<int>(timing_->TargetVideoDelay()) >
max_video_delay_ms_) {
- WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, receiver_id_),
- "More than %u ms target delay. Flushing jitter buffer and"
- "resetting timing.", max_video_delay_ms_);
+ LOG(LS_WARNING) << "The video target delay has grown larger than "
+ << max_video_delay_ms_ << " ms. Resetting jitter buffer.";
timing_error = true;
}
@@ -278,10 +253,7 @@ VCMNackStatus VCMReceiver::NackList(uint16_t* nack_list,
bool request_key_frame = false;
uint16_t* internal_nack_list = jitter_buffer_.GetNackList(
nack_list_length, &request_key_frame);
- if (*nack_list_length > size) {
- *nack_list_length = 0;
- return kNackNeedMoreMemory;
- }
+ assert(*nack_list_length <= size);
if (internal_nack_list != NULL && *nack_list_length > 0) {
memcpy(nack_list, internal_nack_list, *nack_list_length * sizeof(uint16_t));
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.h b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.h
index ac510ea22b3..c037303978f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver.h
@@ -25,7 +25,6 @@ class VCMEncodedFrame;
enum VCMNackStatus {
kNackOk,
- kNackNeedMoreMemory,
kNackKeyFrameRequest
};
@@ -40,8 +39,6 @@ class VCMReceiver {
VCMReceiver(VCMTiming* timing,
Clock* clock,
EventFactory* event_factory,
- int32_t vcm_id,
- int32_t receiver_id,
bool master);
~VCMReceiver();
@@ -95,9 +92,7 @@ class VCMReceiver {
static int32_t GenerateReceiverId();
CriticalSectionWrapper* crit_sect_;
- int32_t vcm_id_;
Clock* clock_;
- int32_t receiver_id_;
bool master_;
VCMJitterBuffer jitter_buffer_;
VCMTiming* timing_;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver_unittest.cc
index 33a3d95f969..e80b9cc9e9e 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/receiver_unittest.cc
@@ -31,7 +31,7 @@ class TestVCMReceiver : public ::testing::Test {
TestVCMReceiver()
: clock_(new SimulatedClock(0)),
timing_(clock_.get()),
- receiver_(&timing_, clock_.get(), &event_factory_, 1, 1, true) {
+ receiver_(&timing_, clock_.get(), &event_factory_, true) {
stream_generator_.reset(new
StreamGenerator(0, 0, clock_->TimeInMilliseconds()));
memset(data_buffer_, 0, kDataBufferSize);
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.cc
index 25d89e54c08..739cc824c37 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.cc
@@ -10,7 +10,6 @@
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/source/rtt_filter.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include <math.h>
#include <stdlib.h>
@@ -18,15 +17,11 @@
namespace webrtc {
-VCMRttFilter::VCMRttFilter(int32_t vcmId, int32_t receiverId)
-:
-_vcmId(vcmId),
-_receiverId(receiverId),
-_filtFactMax(35),
-_jumpStdDevs(2.5),
-_driftStdDevs(3.5),
-_detectThreshold(kMaxDriftJumpCount)
-{
+VCMRttFilter::VCMRttFilter()
+ : _filtFactMax(35),
+ _jumpStdDevs(2.5),
+ _driftStdDevs(3.5),
+ _detectThreshold(kMaxDriftJumpCount) {
Reset();
}
@@ -105,16 +100,13 @@ VCMRttFilter::Update(uint32_t rttMs)
_avgRtt = oldAvg;
_varRtt = oldVar;
}
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "RttFilter Update: sample=%u avgRtt=%f varRtt=%f maxRtt=%u",
- rttMs, _avgRtt, _varRtt, _maxRtt);
}
bool
VCMRttFilter::JumpDetection(uint32_t rttMs)
{
double diffFromAvg = _avgRtt - rttMs;
- if (abs(diffFromAvg) > _jumpStdDevs * sqrt(_varRtt))
+ if (fabs(diffFromAvg) > _jumpStdDevs * sqrt(_varRtt))
{
int diffSign = (diffFromAvg >= 0) ? 1 : -1;
int jumpCountSign = (_jumpCount >= 0) ? 1 : -1;
@@ -141,8 +133,6 @@ VCMRttFilter::JumpDetection(uint32_t rttMs)
ShortRttFilter(_jumpBuf, abs(_jumpCount));
_filtFactCount = _detectThreshold + 1;
_jumpCount = 0;
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Detected an RTT jump");
}
else
{
@@ -174,8 +164,6 @@ VCMRttFilter::DriftDetection(uint32_t rttMs)
ShortRttFilter(_driftBuf, _driftCount);
_filtFactCount = _detectThreshold + 1;
_driftCount = 0;
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
- "Detected an RTT drift");
}
}
else
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.h b/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.h
index 9ce3798b053..8b816a0b46c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/rtt_filter.h
@@ -19,7 +19,7 @@ namespace webrtc
class VCMRttFilter
{
public:
- VCMRttFilter(int32_t vcmId = 0, int32_t receiverId = 0);
+ VCMRttFilter();
VCMRttFilter& operator=(const VCMRttFilter& rhs);
@@ -48,8 +48,6 @@ private:
// Computes the short time average and maximum of the vector buf.
void ShortRttFilter(uint32_t* buf, uint32_t length);
- int32_t _vcmId;
- int32_t _receiverId;
bool _gotNonZeroUpdate;
double _avgRtt;
double _varRtt;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.cc
index 1cb60d39b58..dab3da1383d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.cc
@@ -11,6 +11,7 @@
#include "webrtc/modules/video_coding/main/source/session_info.h"
#include "webrtc/modules/video_coding/main/source/packet.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
@@ -403,6 +404,7 @@ int VCMSessionInfo::InsertPacket(const VCMPacket& packet,
}
if (packets_.size() == kMaxPacketsInSession) {
+ LOG(LS_ERROR) << "Max number of packets per frame has been reached.";
return -1;
}
@@ -429,6 +431,8 @@ int VCMSessionInfo::InsertPacket(const VCMPacket& packet,
first_packet_seq_num_ = static_cast<int>(packet.seqNum);
} else if (first_packet_seq_num_ != -1 &&
!IsNewerSequenceNumber(packet.seqNum, first_packet_seq_num_)) {
+ LOG(LS_WARNING) << "Received packet with a sequence number which is out of"
+ "frame boundaries";
return -3;
} else if (frame_type_ == kFrameEmpty && packet.frameType != kFrameEmpty) {
// Update the frame type with the type of the first media packet.
@@ -441,6 +445,8 @@ int VCMSessionInfo::InsertPacket(const VCMPacket& packet,
last_packet_seq_num_ = static_cast<int>(packet.seqNum);
} else if (last_packet_seq_num_ != -1 &&
IsNewerSequenceNumber(packet.seqNum, last_packet_seq_num_)) {
+ LOG(LS_WARNING) << "Received packet with a sequence number which is out of"
+ "frame boundaries";
return -3;
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.h b/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.h
index 039f09763a2..cae3ee13892 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/session_info.h
@@ -147,7 +147,6 @@ class VCMSessionInfo {
bool complete_;
bool decodable_;
webrtc::FrameType frame_type_;
- bool previous_frame_loss_;
// Packets in this frame.
PacketList packets_;
int empty_seq_num_low_;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc
deleted file mode 100644
index 1d911a54e44..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.cc
+++ /dev/null
@@ -1,248 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "webrtc/modules/video_coding/main/source/internal_defines.h"
-#include "webrtc/modules/video_coding/main/source/timestamp_extrapolator.h"
-#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
-namespace webrtc {
-
-VCMTimestampExtrapolator::VCMTimestampExtrapolator(Clock* clock,
- int32_t vcmId,
- int32_t id)
-:
-_rwLock(RWLockWrapper::CreateRWLock()),
-_vcmId(vcmId),
-_id(id),
-_clock(clock),
-_startMs(0),
-_firstTimestamp(0),
-_wrapArounds(0),
-_prevUnwrappedTimestamp(-1),
-_prevWrapTimestamp(-1),
-_lambda(1),
-_firstAfterReset(true),
-_packetCount(0),
-_startUpFilterDelayInPackets(2),
-_detectorAccumulatorPos(0),
-_detectorAccumulatorNeg(0),
-_alarmThreshold(60e3),
-_accDrift(6600), // in timestamp ticks, i.e. 15 ms
-_accMaxError(7000),
-_P11(1e10)
-{
- Reset();
-}
-
-VCMTimestampExtrapolator::~VCMTimestampExtrapolator()
-{
- delete _rwLock;
-}
-
-void
-VCMTimestampExtrapolator::Reset()
-{
- WriteLockScoped wl(*_rwLock);
- _startMs = _clock->TimeInMilliseconds();
- _prevMs = _startMs;
- _firstTimestamp = 0;
- _w[0] = 90.0;
- _w[1] = 0;
- _P[0][0] = 1;
- _P[1][1] = _P11;
- _P[0][1] = _P[1][0] = 0;
- _firstAfterReset = true;
- _prevUnwrappedTimestamp = -1;
- _prevWrapTimestamp = -1;
- _wrapArounds = 0;
- _packetCount = 0;
- _detectorAccumulatorPos = 0;
- _detectorAccumulatorNeg = 0;
-}
-
-void
-VCMTimestampExtrapolator::Update(int64_t tMs, uint32_t ts90khz, bool trace)
-{
-
- _rwLock->AcquireLockExclusive();
- if (tMs - _prevMs > 10e3)
- {
- // Ten seconds without a complete frame.
- // Reset the extrapolator
- _rwLock->ReleaseLockExclusive();
- Reset();
- _rwLock->AcquireLockExclusive();
- }
- else
- {
- _prevMs = tMs;
- }
-
- // Remove offset to prevent badly scaled matrices
- tMs -= _startMs;
-
- CheckForWrapArounds(ts90khz);
-
- int64_t unwrapped_ts90khz = static_cast<int64_t>(ts90khz) +
- _wrapArounds * ((static_cast<int64_t>(1) << 32) - 1);
-
- if (_prevUnwrappedTimestamp >= 0 &&
- unwrapped_ts90khz < _prevUnwrappedTimestamp)
- {
- // Drop reordered frames.
- _rwLock->ReleaseLockExclusive();
- return;
- }
-
- if (_firstAfterReset)
- {
- // Make an initial guess of the offset,
- // should be almost correct since tMs - _startMs
- // should about zero at this time.
- _w[1] = -_w[0] * tMs;
- _firstTimestamp = unwrapped_ts90khz;
- _firstAfterReset = false;
- }
-
- double residual =
- (static_cast<double>(unwrapped_ts90khz) - _firstTimestamp) -
- static_cast<double>(tMs) * _w[0] - _w[1];
- if (DelayChangeDetection(residual, trace) &&
- _packetCount >= _startUpFilterDelayInPackets)
- {
- // A sudden change of average network delay has been detected.
- // Force the filter to adjust its offset parameter by changing
- // the offset uncertainty. Don't do this during startup.
- _P[1][1] = _P11;
- }
- //T = [t(k) 1]';
- //that = T'*w;
- //K = P*T/(lambda + T'*P*T);
- double K[2];
- K[0] = _P[0][0] * tMs + _P[0][1];
- K[1] = _P[1][0] * tMs + _P[1][1];
- double TPT = _lambda + tMs * K[0] + K[1];
- K[0] /= TPT;
- K[1] /= TPT;
- //w = w + K*(ts(k) - that);
- _w[0] = _w[0] + K[0] * residual;
- _w[1] = _w[1] + K[1] * residual;
- //P = 1/lambda*(P - K*T'*P);
- double p00 = 1 / _lambda * (_P[0][0] - (K[0] * tMs * _P[0][0] + K[0] * _P[1][0]));
- double p01 = 1 / _lambda * (_P[0][1] - (K[0] * tMs * _P[0][1] + K[0] * _P[1][1]));
- _P[1][0] = 1 / _lambda * (_P[1][0] - (K[1] * tMs * _P[0][0] + K[1] * _P[1][0]));
- _P[1][1] = 1 / _lambda * (_P[1][1] - (K[1] * tMs * _P[0][1] + K[1] * _P[1][1]));
- _P[0][0] = p00;
- _P[0][1] = p01;
- _prevUnwrappedTimestamp = unwrapped_ts90khz;
- if (_packetCount < _startUpFilterDelayInPackets)
- {
- _packetCount++;
- }
- if (trace)
- {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id), "w[0]=%f w[1]=%f ts=%u tMs=%u", _w[0], _w[1], ts90khz, tMs);
- }
- _rwLock->ReleaseLockExclusive();
-}
-
-int64_t
-VCMTimestampExtrapolator::ExtrapolateLocalTime(uint32_t timestamp90khz)
-{
- ReadLockScoped rl(*_rwLock);
- int64_t localTimeMs = 0;
- CheckForWrapArounds(timestamp90khz);
- double unwrapped_ts90khz = static_cast<double>(timestamp90khz) +
- _wrapArounds * ((static_cast<int64_t>(1) << 32) - 1);
- if (_packetCount == 0)
- {
- localTimeMs = -1;
- }
- else if (_packetCount < _startUpFilterDelayInPackets)
- {
- localTimeMs = _prevMs + static_cast<int64_t>(
- static_cast<double>(unwrapped_ts90khz - _prevUnwrappedTimestamp) /
- 90.0 + 0.5);
- }
- else
- {
- if (_w[0] < 1e-3)
- {
- localTimeMs = _startMs;
- }
- else
- {
- double timestampDiff = unwrapped_ts90khz -
- static_cast<double>(_firstTimestamp);
- localTimeMs = static_cast<int64_t>(
- static_cast<double>(_startMs) + (timestampDiff - _w[1]) /
- _w[0] + 0.5);
- }
- }
- return localTimeMs;
-}
-
-// Investigates if the timestamp clock has overflowed since the last timestamp and
-// keeps track of the number of wrap arounds since reset.
-void
-VCMTimestampExtrapolator::CheckForWrapArounds(uint32_t ts90khz)
-{
- if (_prevWrapTimestamp == -1)
- {
- _prevWrapTimestamp = ts90khz;
- return;
- }
- if (ts90khz < _prevWrapTimestamp)
- {
- // This difference will probably be less than -2^31 if we have had a wrap around
- // (e.g. timestamp = 1, _previousTimestamp = 2^32 - 1). Since it is casted to a Word32,
- // it should be positive.
- if (static_cast<int32_t>(ts90khz - _prevWrapTimestamp) > 0)
- {
- // Forward wrap around
- _wrapArounds++;
- }
- }
- // This difference will probably be less than -2^31 if we have had a backward wrap around.
- // Since it is casted to a Word32, it should be positive.
- else if (static_cast<int32_t>(_prevWrapTimestamp - ts90khz) > 0)
- {
- // Backward wrap around
- _wrapArounds--;
- }
- _prevWrapTimestamp = ts90khz;
-}
-
-bool
-VCMTimestampExtrapolator::DelayChangeDetection(double error, bool trace)
-{
- // CUSUM detection of sudden delay changes
- error = (error > 0) ? VCM_MIN(error, _accMaxError) : VCM_MAX(error, -_accMaxError);
- _detectorAccumulatorPos = VCM_MAX(_detectorAccumulatorPos + error - _accDrift, (double)0);
- _detectorAccumulatorNeg = VCM_MIN(_detectorAccumulatorNeg + error + _accDrift, (double)0);
- if (_detectorAccumulatorPos > _alarmThreshold || _detectorAccumulatorNeg < -_alarmThreshold)
- {
- // Alarm
- if (trace)
- {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id), "g1=%f g2=%f alarm=1", _detectorAccumulatorPos, _detectorAccumulatorNeg);
- }
- _detectorAccumulatorPos = _detectorAccumulatorNeg = 0;
- return true;
- }
- if (trace)
- {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id), "g1=%f g2=%f alarm=0", _detectorAccumulatorPos, _detectorAccumulatorNeg);
- }
- return false;
-}
-
-}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h b/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h
deleted file mode 100644
index 4565186a353..00000000000
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/timestamp_extrapolator.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
-#define WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
-
-#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
-#include "webrtc/typedefs.h"
-
-namespace webrtc
-{
-
-class Clock;
-
-class VCMTimestampExtrapolator
-{
-public:
- VCMTimestampExtrapolator(Clock* clock,
- int32_t vcmId = 0,
- int32_t receiverId = 0);
- ~VCMTimestampExtrapolator();
- void Update(int64_t tMs, uint32_t ts90khz, bool trace = true);
- int64_t ExtrapolateLocalTime(uint32_t timestamp90khz);
- void Reset();
-
-private:
- void CheckForWrapArounds(uint32_t ts90khz);
- bool DelayChangeDetection(double error, bool trace = true);
- RWLockWrapper* _rwLock;
- int32_t _vcmId;
- int32_t _id;
- Clock* _clock;
- double _w[2];
- double _P[2][2];
- int64_t _startMs;
- int64_t _prevMs;
- uint32_t _firstTimestamp;
- int32_t _wrapArounds;
- int64_t _prevUnwrappedTimestamp;
- int64_t _prevWrapTimestamp;
- const double _lambda;
- bool _firstAfterReset;
- uint32_t _packetCount;
- const uint32_t _startUpFilterDelayInPackets;
-
- double _detectorAccumulatorPos;
- double _detectorAccumulatorNeg;
- const double _alarmThreshold;
- const double _accDrift;
- const double _accMaxError;
- const double _P11;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/timing.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/timing.cc
index 98a69e962fd..af0e35c4e1f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/timing.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/timing.cc
@@ -10,25 +10,18 @@
#include "webrtc/modules/video_coding/main/source/timing.h"
-
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
-#include "webrtc/modules/video_coding/main/source/timestamp_extrapolator.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
-
+#include "webrtc/system_wrappers/interface/timestamp_extrapolator.h"
namespace webrtc {
VCMTiming::VCMTiming(Clock* clock,
- int32_t vcm_id,
- int32_t timing_id,
VCMTiming* master_timing)
: crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
- vcm_id_(vcm_id),
clock_(clock),
- timing_id_(timing_id),
master_(false),
ts_extrapolator_(),
codec_timer_(),
@@ -40,7 +33,7 @@ VCMTiming::VCMTiming(Clock* clock,
prev_frame_timestamp_(0) {
if (master_timing == NULL) {
master_ = true;
- ts_extrapolator_ = new VCMTimestampExtrapolator(clock_, vcm_id, timing_id);
+ ts_extrapolator_ = new TimestampExtrapolator(clock_->TimeInMilliseconds());
} else {
ts_extrapolator_ = master_timing->ts_extrapolator_;
}
@@ -55,7 +48,7 @@ VCMTiming::~VCMTiming() {
void VCMTiming::Reset() {
CriticalSectionScoped cs(crit_sect_);
- ts_extrapolator_->Reset();
+ ts_extrapolator_->Reset(clock_->TimeInMilliseconds());
codec_timer_.Reset();
render_delay_ms_ = kDefaultRenderDelayMs;
min_playout_delay_ms_ = 0;
@@ -81,11 +74,6 @@ void VCMTiming::set_min_playout_delay(uint32_t min_playout_delay_ms) {
void VCMTiming::SetJitterDelay(uint32_t jitter_delay_ms) {
CriticalSectionScoped cs(crit_sect_);
if (jitter_delay_ms != jitter_delay_ms_) {
- if (master_) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, timing_id_),
- "Desired jitter buffer level: %u ms", jitter_delay_ms);
- }
jitter_delay_ms_ = jitter_delay_ms;
// When in initial state, set current delay to minimum delay.
if (current_delay_ms_ == 0) {
@@ -152,39 +140,21 @@ int32_t VCMTiming::StopDecodeTimer(uint32_t time_stamp,
int64_t start_time_ms,
int64_t now_ms) {
CriticalSectionScoped cs(crit_sect_);
- const int32_t max_dec_time = MaxDecodeTimeMs();
int32_t time_diff_ms = codec_timer_.StopTimer(start_time_ms, now_ms);
- if (time_diff_ms < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(vcm_id_,
- timing_id_), "Codec timer error: %d", time_diff_ms);
- assert(false);
- }
+ assert(time_diff_ms >= 0);
last_decode_ms_ = time_diff_ms;
- if (master_) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(vcm_id_,
- timing_id_),
- "Frame decoded: time_stamp=%u dec_time=%d max_dec_time=%u, at %u",
- time_stamp, time_diff_ms, max_dec_time, MaskWord64ToUWord32(now_ms));
- }
return 0;
}
void VCMTiming::IncomingTimestamp(uint32_t time_stamp, int64_t now_ms) {
CriticalSectionScoped cs(crit_sect_);
- ts_extrapolator_->Update(now_ms, time_stamp, master_);
+ ts_extrapolator_->Update(now_ms, time_stamp);
}
int64_t VCMTiming::RenderTimeMs(uint32_t frame_timestamp, int64_t now_ms)
const {
CriticalSectionScoped cs(crit_sect_);
const int64_t render_time_ms = RenderTimeMsInternal(frame_timestamp, now_ms);
- if (master_) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(vcm_id_,
- timing_id_), "Render frame %u at %u. Render delay %u",
- "jitter delay %u, max decode time %u, playout delay %u",
- frame_timestamp, MaskWord64ToUWord32(render_time_ms), render_delay_ms_,
- jitter_delay_ms_, MaxDecodeTimeMs(), min_playout_delay_ms_);
- }
return render_time_ms;
}
@@ -192,11 +162,6 @@ int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp,
int64_t now_ms) const {
int64_t estimated_complete_time_ms =
ts_extrapolator_->ExtrapolateLocalTime(frame_timestamp);
- if (master_) {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, timing_id_), "ExtrapolateLocalTime(%u)=%u ms",
- frame_timestamp, MaskWord64ToUWord32(estimated_complete_time_ms));
- }
if (estimated_complete_time_ms == -1) {
estimated_complete_time_ms = now_ms;
}
@@ -210,11 +175,7 @@ int64_t VCMTiming::RenderTimeMsInternal(uint32_t frame_timestamp,
int32_t VCMTiming::MaxDecodeTimeMs(FrameType frame_type /*= kVideoFrameDelta*/)
const {
const int32_t decode_time_ms = codec_timer_.RequiredDecodeTimeMs(frame_type);
- if (decode_time_ms < 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(vcm_id_,
- timing_id_), "Negative maximum decode time: %d", decode_time_ms);
- return -1;
- }
+ assert(decode_time_ms >= 0);
return decode_time_ms;
}
@@ -254,11 +215,6 @@ uint32_t VCMTiming::TargetVideoDelay() const {
}
uint32_t VCMTiming::TargetDelayInternal() const {
- WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
- VCMId(vcm_id_, timing_id_),
- "Delay: min_playout=%u jitter=%u max_decode=%u render=%u",
- min_playout_delay_ms_, jitter_delay_ms_, MaxDecodeTimeMs(),
- render_delay_ms_);
return std::max(min_playout_delay_ms_,
jitter_delay_ms_ + MaxDecodeTimeMs() + render_delay_ms_);
}
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/timing.h b/chromium/third_party/webrtc/modules/video_coding/main/source/timing.h
index eb251b71153..1dca5e605af 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/timing.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/timing.h
@@ -18,15 +18,13 @@
namespace webrtc {
class Clock;
-class VCMTimestampExtrapolator;
+class TimestampExtrapolator;
class VCMTiming {
public:
// The primary timing component should be passed
// if this is the dual timing component.
VCMTiming(Clock* clock,
- int32_t vcm_id = 0,
- int32_t timing_id = 0,
VCMTiming* master_timing = NULL);
~VCMTiming();
@@ -101,11 +99,9 @@ class VCMTiming {
private:
CriticalSectionWrapper* crit_sect_;
- int32_t vcm_id_;
Clock* clock_;
- int32_t timing_id_;
bool master_;
- VCMTimestampExtrapolator* ts_extrapolator_;
+ TimestampExtrapolator* ts_extrapolator_;
VCMCodecTimer codec_timer_;
uint32_t render_delay_ms_;
uint32_t min_playout_delay_ms_;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding.gypi b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding.gypi
index b4f6cb7b1c8..f19a585523d 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding.gypi
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding.gypi
@@ -48,7 +48,6 @@
'receiver.h',
'rtt_filter.h',
'session_info.h',
- 'timestamp_extrapolator.h',
'timestamp_map.h',
'timing.h',
'video_coding_impl.h',
@@ -72,7 +71,6 @@
'receiver.cc',
'rtt_filter.cc',
'session_info.cc',
- 'timestamp_extrapolator.cc',
'timestamp_map.cc',
'timing.cc',
'video_coding_impl.cc',
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.cc
index 1decc2f112f..5b93a656717 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.cc
@@ -16,7 +16,6 @@
#include "webrtc/modules/video_coding/main/source/packet.h"
#include "webrtc/modules/video_coding/main/source/video_coding_impl.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
@@ -45,15 +44,44 @@ VCMProcessTimer::Processed() {
} // namespace vcm
namespace {
+// This wrapper provides a way to modify the callback without the need to expose
+// a register method all the way down to the function calling it.
+class EncodedImageCallbackWrapper : public EncodedImageCallback {
+ public:
+ EncodedImageCallbackWrapper()
+ : cs_(CriticalSectionWrapper::CreateCriticalSection()), callback_(NULL) {}
+
+ virtual ~EncodedImageCallbackWrapper() {}
+
+ void Register(EncodedImageCallback* callback) {
+ CriticalSectionScoped cs(cs_.get());
+ callback_ = callback;
+ }
+
+ // TODO(andresp): Change to void as return value is ignored.
+ virtual int32_t Encoded(EncodedImage& encoded_image,
+ const CodecSpecificInfo* codec_specific_info,
+ const RTPFragmentationHeader* fragmentation) {
+ CriticalSectionScoped cs(cs_.get());
+ if (callback_)
+ return callback_->Encoded(
+ encoded_image, codec_specific_info, fragmentation);
+ return 0;
+ }
+
+ private:
+ scoped_ptr<CriticalSectionWrapper> cs_;
+ EncodedImageCallback* callback_ GUARDED_BY(cs_);
+};
+
class VideoCodingModuleImpl : public VideoCodingModule {
public:
- VideoCodingModuleImpl(const int32_t id,
- Clock* clock,
+ VideoCodingModuleImpl(Clock* clock,
EventFactory* event_factory,
bool owns_event_factory)
: VideoCodingModule(),
- sender_(new vcm::VideoSender(id, clock)),
- receiver_(new vcm::VideoReceiver(id, clock, event_factory)),
+ sender_(new vcm::VideoSender(clock, &post_encode_callback_)),
+ receiver_(new vcm::VideoReceiver(clock, event_factory)),
own_event_factory_(owns_event_factory ? event_factory : NULL) {}
virtual ~VideoCodingModuleImpl() {
@@ -194,7 +222,8 @@ class VideoCodingModuleImpl : public VideoCodingModule {
}
virtual int StopDebugRecording() OVERRIDE {
- return sender_->StopDebugRecording();
+ sender_->StopDebugRecording();
+ return VCM_OK;
}
virtual void SuspendBelowMinBitrate() {
@@ -326,10 +355,11 @@ class VideoCodingModuleImpl : public VideoCodingModule {
virtual void RegisterPostEncodeImageCallback(
EncodedImageCallback* observer) OVERRIDE {
- sender_->RegisterPostEncodeImageCallback(observer);
+ post_encode_callback_.Register(observer);
}
private:
+ EncodedImageCallbackWrapper post_encode_callback_;
scoped_ptr<vcm::VideoSender> sender_;
scoped_ptr<vcm::VideoReceiver> receiver_;
scoped_ptr<EventFactory> own_event_factory_;
@@ -354,17 +384,16 @@ int32_t VideoCodingModule::Codec(VideoCodecType codecType, VideoCodec* codec) {
return VCMCodecDataBase::Codec(codecType, codec) ? 0 : -1;
}
-VideoCodingModule* VideoCodingModule::Create(const int32_t id) {
+VideoCodingModule* VideoCodingModule::Create() {
return new VideoCodingModuleImpl(
- id, Clock::GetRealTimeClock(), new EventFactoryImpl, true);
+ Clock::GetRealTimeClock(), new EventFactoryImpl, true);
}
-VideoCodingModule* VideoCodingModule::Create(const int32_t id,
- Clock* clock,
+VideoCodingModule* VideoCodingModule::Create(Clock* clock,
EventFactory* event_factory) {
assert(clock);
assert(event_factory);
- return new VideoCodingModuleImpl(id, clock, event_factory, false);
+ return new VideoCodingModuleImpl(clock, event_factory, false);
}
void VideoCodingModule::Destroy(VideoCodingModule* module) {
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.h b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.h
index d9564c04bd9..bf0bc7905ba 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.h
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_impl.h
@@ -32,6 +32,8 @@ class EncodedFrameObserver;
namespace vcm {
+class DebugRecorder;
+
class VCMProcessTimer {
public:
VCMProcessTimer(uint32_t periodMs, Clock* clock)
@@ -52,7 +54,8 @@ class VideoSender {
public:
typedef VideoCodingModule::SenderNackMode SenderNackMode;
- VideoSender(const int32_t id, Clock* clock);
+ VideoSender(Clock* clock, EncodedImageCallback* post_encode_callback);
+
~VideoSender();
int32_t InitializeSender();
@@ -68,7 +71,8 @@ class VideoSender {
uint8_t payloadType,
bool internalSource);
- int32_t CodecConfigParameters(uint8_t* buffer, int32_t size);
+ int32_t CodecConfigParameters(uint8_t* buffer, int32_t size) const;
+ int32_t SentFrameCount(VCMFrameCount* frameCount);
int Bitrate(unsigned int* bitrate) const;
int FrameRate(unsigned int* framerate) const;
@@ -88,7 +92,6 @@ class VideoSender {
int32_t IntraFrameRequest(int stream_index);
int32_t EnableFrameDropper(bool enable);
- int32_t SentFrameCount(VCMFrameCount* frameCount) const;
int SetSenderNackMode(SenderNackMode mode);
int SetSenderReferenceSelection(bool enable);
@@ -96,21 +99,19 @@ class VideoSender {
int SetSenderKeyFramePeriod(int periodMs);
int StartDebugRecording(const char* file_name_utf8);
- int StopDebugRecording();
+ void StopDebugRecording();
void SuspendBelowMinBitrate();
bool VideoSuspended() const;
- void RegisterPostEncodeImageCallback(
- EncodedImageCallback* post_encode_callback);
-
int32_t TimeUntilNextProcess();
int32_t Process();
private:
- int32_t _id;
Clock* clock_;
+ scoped_ptr<DebugRecorder> recorder_;
+
scoped_ptr<CriticalSectionWrapper> process_crit_sect_;
CriticalSectionWrapper* _sendCritSect;
VCMGenericEncoder* _encoder;
@@ -118,17 +119,19 @@ class VideoSender {
std::vector<FrameType> _nextFrameTypes;
media_optimization::MediaOptimization _mediaOpt;
VCMSendStatisticsCallback* _sendStatsCallback;
- FILE* _encoderInputFile;
VCMCodecDataBase _codecDataBase;
bool frame_dropper_enabled_;
VCMProcessTimer _sendStatsTimer;
+
+ VCMQMSettingsCallback* qm_settings_callback_;
+ VCMProtectionCallback* protection_callback_;
};
class VideoReceiver {
public:
typedef VideoCodingModule::ReceiverRobustness ReceiverRobustness;
- VideoReceiver(const int32_t id, Clock* clock, EventFactory* event_factory);
+ VideoReceiver(Clock* clock, EventFactory* event_factory);
~VideoReceiver();
int32_t InitializeReceiver();
@@ -196,7 +199,6 @@ class VideoReceiver {
// in any frame
};
- int32_t _id;
Clock* clock_;
scoped_ptr<CriticalSectionWrapper> process_crit_sect_;
CriticalSectionWrapper* _receiveCritSect;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc
index fbb511448c4..435e18202f7 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_robustness_unittest.cc
@@ -35,7 +35,7 @@ class VCMRobustnessTest : public ::testing::Test {
virtual void SetUp() {
clock_.reset(new SimulatedClock(0));
ASSERT_TRUE(clock_.get() != NULL);
- vcm_ = VideoCodingModule::Create(0, clock_.get(), &event_factory_);
+ vcm_ = VideoCodingModule::Create(clock_.get(), &event_factory_);
ASSERT_TRUE(vcm_ != NULL);
ASSERT_EQ(0, vcm_->InitializeReceiver());
const size_t kMaxNackListSize = 250;
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_test.gypi b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_test.gypi
index a64e02d02c8..b0fe510cf9c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_test.gypi
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_coding_test.gypi
@@ -20,6 +20,7 @@
'<(webrtc_root)/test/test.gyp:test_support',
'<(webrtc_root)/test/metrics.gyp:metrics',
'<(webrtc_root)/common_video/common_video.gyp:common_video',
+ '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:field_trial_default',
],
'sources': [
# headers
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver.cc
index 68668eae7de..5bc1c90f52c 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver.cc
@@ -16,7 +16,7 @@
#include "webrtc/modules/video_coding/main/source/packet.h"
#include "webrtc/modules/video_coding/main/source/video_coding_impl.h"
#include "webrtc/system_wrappers/interface/clock.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
// #define DEBUG_DECODER_BIT_STREAM
@@ -24,18 +24,15 @@
namespace webrtc {
namespace vcm {
-VideoReceiver::VideoReceiver(const int32_t id,
- Clock* clock,
- EventFactory* event_factory)
- : _id(id),
- clock_(clock),
+VideoReceiver::VideoReceiver(Clock* clock, EventFactory* event_factory)
+ : clock_(clock),
process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
_receiveCritSect(CriticalSectionWrapper::CreateCriticalSection()),
_receiverInited(false),
- _timing(clock_, id, 1),
- _dualTiming(clock_, id, 2, &_timing),
- _receiver(&_timing, clock_, event_factory, id, 1, true),
- _dualReceiver(&_dualTiming, clock_, event_factory, id, 2, false),
+ _timing(clock_),
+ _dualTiming(clock_, &_timing),
+ _receiver(&_timing, clock_, event_factory, true),
+ _dualReceiver(&_dualTiming, clock_, event_factory, false),
_decodedFrameCallback(_timing, clock_),
_dualDecodedFrameCallback(_dualTiming, clock_),
_frameTypeCallback(NULL),
@@ -53,7 +50,7 @@ VideoReceiver::VideoReceiver(const int32_t id,
_scheduleKeyRequest(false),
max_nack_list_size_(0),
pre_decode_image_callback_(NULL),
- _codecDataBase(id),
+ _codecDataBase(),
_receiveStatsTimer(1000, clock_),
_retransmissionTimer(10, clock_),
_keyRequestTimer(500, clock_) {
@@ -121,8 +118,12 @@ int32_t VideoReceiver::Process() {
// Key frame requests
if (_keyRequestTimer.TimeUntilProcess() == 0) {
_keyRequestTimer.Processed();
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_scheduleKeyRequest && _frameTypeCallback != NULL) {
+ bool request_key_frame = false;
+ {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ request_key_frame = _scheduleKeyRequest && _frameTypeCallback != NULL;
+ }
+ if (request_key_frame) {
const int32_t ret = RequestKeyFrame();
if (ret != VCM_OK && returnValue == VCM_OK) {
returnValue = ret;
@@ -135,16 +136,24 @@ int32_t VideoReceiver::Process() {
// disabled when NACK is off.
if (_retransmissionTimer.TimeUntilProcess() == 0) {
_retransmissionTimer.Processed();
- CriticalSectionScoped cs(process_crit_sect_.get());
- if (_packetRequestCallback != NULL) {
- uint16_t length = max_nack_list_size_;
+ bool callback_registered = false;
+ uint16_t length;
+ {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ length = max_nack_list_size_;
+ callback_registered = _packetRequestCallback != NULL;
+ }
+ if (callback_registered && length > 0) {
std::vector<uint16_t> nackList(length);
const int32_t ret = NackList(&nackList[0], &length);
if (ret != VCM_OK && returnValue == VCM_OK) {
returnValue = ret;
}
- if (length > 0) {
- _packetRequestCallback->ResendPackets(&nackList[0], length);
+ if (ret == VCM_OK && length > 0) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ if (_packetRequestCallback != NULL) {
+ _packetRequestCallback->ResendPackets(&nackList[0], length);
+ }
}
}
}
@@ -434,17 +443,9 @@ int32_t VideoReceiver::RequestSliceLossIndication(
const int32_t ret =
_frameTypeCallback->SliceLossIndicationRequest(pictureID);
if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to request key frame");
return ret;
}
} else {
- WEBRTC_TRACE(webrtc::kTraceWarning,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "No frame type request callback registered");
return VCM_MISSING_CALLBACK;
}
return VCM_OK;
@@ -452,22 +453,14 @@ int32_t VideoReceiver::RequestSliceLossIndication(
int32_t VideoReceiver::RequestKeyFrame() {
TRACE_EVENT0("webrtc", "RequestKeyFrame");
- CriticalSectionScoped cs(process_crit_sect_.get());
+ CriticalSectionScoped process_cs(process_crit_sect_.get());
if (_frameTypeCallback != NULL) {
const int32_t ret = _frameTypeCallback->RequestKeyFrame();
if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to request key frame");
return ret;
}
_scheduleKeyRequest = false;
} else {
- WEBRTC_TRACE(webrtc::kTraceWarning,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "No frame type request callback registered");
return VCM_MISSING_CALLBACK;
}
return VCM_OK;
@@ -490,29 +483,18 @@ int32_t VideoReceiver::DecodeDualFrame(uint16_t maxWaitTimeMs) {
VCMEncodedFrame* dualFrame =
_dualReceiver.FrameForDecoding(maxWaitTimeMs, dummyRenderTime);
if (dualFrame != NULL && _dualDecoder != NULL) {
- WEBRTC_TRACE(webrtc::kTraceStream,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Decoding frame %u with dual decoder",
- dualFrame->TimeStamp());
// Decode dualFrame and try to catch up
int32_t ret =
_dualDecoder->Decode(*dualFrame, clock_->TimeInMilliseconds());
if (ret != WEBRTC_VIDEO_CODEC_OK) {
- WEBRTC_TRACE(webrtc::kTraceWarning,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to decode frame with dual decoder");
+ LOG(LS_ERROR) << "Failed to decode frame with dual decoder. Error code: "
+ << ret;
_dualReceiver.ReleaseFrame(dualFrame);
return VCM_CODEC_ERROR;
}
if (_receiver.DualDecoderCaughtUp(dualFrame, _dualReceiver)) {
// Copy the complete decoder state of the dual decoder
// to the primary decoder.
- WEBRTC_TRACE(webrtc::kTraceStream,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Dual decoder caught up");
_codecDataBase.CopyDecoder(*_dualDecoder);
_codecDataBase.ReleaseDecoder(_dualDecoder);
_dualDecoder = NULL;
@@ -547,62 +529,65 @@ int32_t VideoReceiver::Decode(const VCMEncodedFrame& frame) {
int32_t ret = _decoder->Decode(frame, clock_->TimeInMilliseconds());
// Check for failed decoding, run frame type request callback if needed.
+ bool request_key_frame = false;
if (ret < 0) {
if (ret == VCM_ERROR_REQUEST_SLI) {
return RequestSliceLossIndication(
_decodedFrameCallback.LastReceivedPictureID() + 1);
} else {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to decode frame %u, requesting key frame",
- frame.TimeStamp());
- ret = RequestKeyFrame();
+ request_key_frame = true;
}
} else if (ret == VCM_REQUEST_SLI) {
ret = RequestSliceLossIndication(
_decodedFrameCallback.LastReceivedPictureID() + 1);
}
if (!frame.Complete() || frame.MissingFrame()) {
- CriticalSectionScoped cs(process_crit_sect_.get());
switch (_keyRequestMode) {
case kKeyOnKeyLoss: {
if (frame.FrameType() == kVideoFrameKey) {
- _scheduleKeyRequest = true;
- return VCM_OK;
+ request_key_frame = true;
+ ret = VCM_OK;
}
break;
}
case kKeyOnLoss: {
- _scheduleKeyRequest = true;
- return VCM_OK;
+ request_key_frame = true;
+ ret = VCM_OK;
}
default:
break;
}
}
+ if (request_key_frame) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ _scheduleKeyRequest = true;
+ }
TRACE_EVENT_ASYNC_END0("webrtc", "Video", frame.TimeStamp());
return ret;
}
// Reset the decoder state
int32_t VideoReceiver::ResetDecoder() {
- CriticalSectionScoped cs(_receiveCritSect);
- if (_decoder != NULL) {
- _receiver.Initialize();
- _timing.Reset();
- {
- CriticalSectionScoped cs(process_crit_sect_.get());
- _scheduleKeyRequest = false;
+ bool reset_key_request = false;
+ {
+ CriticalSectionScoped cs(_receiveCritSect);
+ if (_decoder != NULL) {
+ _receiver.Initialize();
+ _timing.Reset();
+ reset_key_request = true;
+ _decoder->Reset();
+ }
+ if (_dualReceiver.State() != kPassive) {
+ _dualReceiver.Initialize();
+ }
+ if (_dualDecoder != NULL) {
+ _codecDataBase.ReleaseDecoder(_dualDecoder);
+ _dualDecoder = NULL;
}
- _decoder->Reset();
- }
- if (_dualReceiver.State() != kPassive) {
- _dualReceiver.Initialize();
}
- if (_dualDecoder != NULL) {
- _codecDataBase.ReleaseDecoder(_dualDecoder);
- _dualDecoder = NULL;
+ if (reset_key_request) {
+ CriticalSectionScoped cs(process_crit_sect_.get());
+ _scheduleKeyRequest = false;
}
return VCM_OK;
}
@@ -710,25 +695,8 @@ int32_t VideoReceiver::NackList(uint16_t* nackList, uint16_t* size) {
nackStatus = _dualReceiver.NackList(nackList, *size, &nack_list_length);
}
*size = nack_list_length;
-
- switch (nackStatus) {
- case kNackNeedMoreMemory: {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Out of memory");
- return VCM_MEMORY;
- }
- case kNackKeyFrameRequest: {
- CriticalSectionScoped cs(_receiveCritSect);
- WEBRTC_TRACE(webrtc::kTraceWarning,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to get NACK list, requesting key frame");
+ if (nackStatus == kNackKeyFrameRequest) {
return RequestKeyFrame();
- }
- default:
- break;
}
return VCM_OK;
}
@@ -763,14 +731,17 @@ int VideoReceiver::SetReceiverRobustnessMode(
_keyRequestMode = kKeyOnError; // TODO(hlundin): On long NACK list?
break;
case VideoCodingModule::kSoftNack:
+#if 1
assert(false); // TODO(hlundin): Not completed.
return VCM_NOT_IMPLEMENTED;
+#else
// Enable hybrid NACK/FEC. Always wait for retransmissions and don't add
// extra delay when RTT is above kLowRttNackMs.
_receiver.SetNackMode(kNack, media_optimization::kLowRttNackMs, -1);
_dualReceiver.SetNackMode(kNoNack, -1, -1);
_keyRequestMode = kKeyOnError;
break;
+#endif
case VideoCodingModule::kDualDecoder:
if (decode_error_mode == kNoErrors) {
return VCM_PARAMETER_ERROR;
@@ -783,14 +754,17 @@ int VideoReceiver::SetReceiverRobustnessMode(
_keyRequestMode = kKeyOnError;
break;
case VideoCodingModule::kReferenceSelection:
+#if 1
assert(false); // TODO(hlundin): Not completed.
return VCM_NOT_IMPLEMENTED;
+#else
if (decode_error_mode == kNoErrors) {
return VCM_PARAMETER_ERROR;
}
_receiver.SetNackMode(kNoNack, -1, -1);
_dualReceiver.SetNackMode(kNoNack, -1, -1);
break;
+#endif
}
_receiver.SetDecodeErrorMode(decode_error_mode);
// The dual decoder should never decode with errors.
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc
index 4fd524d4bce..502dfa9dd2f 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_receiver_unittest.cc
@@ -33,7 +33,7 @@ class TestVideoReceiver : public ::testing::Test {
TestVideoReceiver() : clock_(0) {}
virtual void SetUp() {
- receiver_.reset(new VideoReceiver(0, &clock_, &event_factory_));
+ receiver_.reset(new VideoReceiver(&clock_, &event_factory_));
EXPECT_EQ(0, receiver_->InitializeReceiver());
EXPECT_EQ(0,
receiver_->RegisterExternalDecoder(
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender.cc
index 948218b83a6..38ecc5479a4 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender.cc
@@ -17,30 +17,66 @@
#include "webrtc/modules/video_coding/main/source/encoded_frame.h"
#include "webrtc/modules/video_coding/main/source/video_coding_impl.h"
#include "webrtc/system_wrappers/interface/clock.h"
+#include "webrtc/system_wrappers/interface/logging.h"
namespace webrtc {
namespace vcm {
-VideoSender::VideoSender(const int32_t id, Clock* clock)
- : _id(id),
- clock_(clock),
+class DebugRecorder {
+ public:
+ DebugRecorder()
+ : cs_(CriticalSectionWrapper::CreateCriticalSection()), file_(NULL) {}
+
+ ~DebugRecorder() { Stop(); }
+
+ int Start(const char* file_name_utf8) {
+ CriticalSectionScoped cs(cs_.get());
+ if (file_)
+ fclose(file_);
+ file_ = fopen(file_name_utf8, "wb");
+ if (!file_)
+ return VCM_GENERAL_ERROR;
+ return VCM_OK;
+ }
+
+ void Stop() {
+ CriticalSectionScoped cs(cs_.get());
+ if (file_) {
+ fclose(file_);
+ file_ = NULL;
+ }
+ }
+
+ void Add(const I420VideoFrame& frame) {
+ CriticalSectionScoped cs(cs_.get());
+ if (file_)
+ PrintI420VideoFrame(frame, file_);
+ }
+
+ private:
+ scoped_ptr<CriticalSectionWrapper> cs_;
+ FILE* file_ GUARDED_BY(cs_);
+};
+
+VideoSender::VideoSender(Clock* clock,
+ EncodedImageCallback* post_encode_callback)
+ : clock_(clock),
+ recorder_(new DebugRecorder()),
process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
_sendCritSect(CriticalSectionWrapper::CreateCriticalSection()),
_encoder(),
- _encodedFrameCallback(),
+ _encodedFrameCallback(post_encode_callback),
_nextFrameTypes(1, kVideoFrameDelta),
- _mediaOpt(id, clock_),
+ _mediaOpt(clock_),
_sendStatsCallback(NULL),
- _encoderInputFile(NULL),
- _codecDataBase(id),
+ _codecDataBase(),
frame_dropper_enabled_(true),
- _sendStatsTimer(1000, clock_) {}
+ _sendStatsTimer(1000, clock_),
+ qm_settings_callback_(NULL),
+ protection_callback_(NULL) {}
VideoSender::~VideoSender() {
delete _sendCritSect;
- if (_encoderInputFile != NULL) {
- fclose(_encoderInputFile);
- }
}
int32_t VideoSender::Process() {
@@ -70,8 +106,6 @@ int32_t VideoSender::InitializeSender() {
_codecDataBase.ResetSender();
_encoder = NULL;
_encodedFrameCallback.SetTransportCallback(NULL);
- // setting default bitRate and frameRate to 0
- _mediaOpt.SetEncodingData(kVideoCodecUnknown, 0, 0, 0, 0, 0, 0);
_mediaOpt.Reset(); // Resetting frame dropper
return VCM_OK;
}
@@ -97,10 +131,8 @@ int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec,
_encoder = _codecDataBase.GetEncoder();
if (!ret) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Failed to initialize encoder");
+ LOG(LS_ERROR) << "Failed to initialize the encoder with payload name "
+ << sendCodec->plName << ". Error code: " << ret;
return VCM_CODEC_ERROR;
}
@@ -125,9 +157,8 @@ int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec,
sendCodec->startBitrate * 1000,
sendCodec->width,
sendCodec->height,
- numLayers);
- _mediaOpt.set_max_payload_size(maxPayloadSize);
-
+ numLayers,
+ maxPayloadSize);
return VCM_OK;
}
@@ -171,7 +202,8 @@ int32_t VideoSender::RegisterExternalEncoder(VideoEncoder* externalEncoder,
}
// Get codec config parameters
-int32_t VideoSender::CodecConfigParameters(uint8_t* buffer, int32_t size) {
+int32_t VideoSender::CodecConfigParameters(uint8_t* buffer,
+ int32_t size) const {
CriticalSectionScoped cs(_sendCritSect);
if (_encoder != NULL) {
return _encoder->CodecConfigParameters(buffer, size);
@@ -179,6 +211,14 @@ int32_t VideoSender::CodecConfigParameters(uint8_t* buffer, int32_t size) {
return VCM_UNINITIALIZED;
}
+// TODO(andresp): Make const once media_opt is thread-safe and this has a
+// pointer to it.
+int32_t VideoSender::SentFrameCount(VCMFrameCount* frameCount) {
+ CriticalSectionScoped cs(_sendCritSect);
+ *frameCount = _mediaOpt.SentFrameCount();
+ return VCM_OK;
+}
+
// Get encode bitrate
int VideoSender::Bitrate(unsigned int* bitrate) const {
CriticalSectionScoped cs(_sendCritSect);
@@ -208,8 +248,11 @@ int32_t VideoSender::SetChannelParameters(uint32_t target_bitrate,
int32_t ret = 0;
{
CriticalSectionScoped sendCs(_sendCritSect);
- uint32_t targetRate =
- _mediaOpt.SetTargetRates(target_bitrate, lossRate, rtt);
+ uint32_t targetRate = _mediaOpt.SetTargetRates(target_bitrate,
+ lossRate,
+ rtt,
+ protection_callback_,
+ qm_settings_callback_);
if (_encoder != NULL) {
ret = _encoder->SetChannelParameters(lossRate, rtt);
if (ret < 0) {
@@ -247,17 +290,19 @@ int32_t VideoSender::RegisterSendStatisticsCallback(
// Register a video quality settings callback which will be called when frame
// rate/dimensions need to be updated for video quality optimization
int32_t VideoSender::RegisterVideoQMCallback(
- VCMQMSettingsCallback* videoQMSettings) {
+ VCMQMSettingsCallback* qm_settings_callback) {
CriticalSectionScoped cs(_sendCritSect);
- return _mediaOpt.RegisterVideoQMCallback(videoQMSettings);
+ qm_settings_callback_ = qm_settings_callback;
+ _mediaOpt.EnableQM(qm_settings_callback_ != NULL);
+ return VCM_OK;
}
// Register a video protection callback which will be called to deliver the
// requested FEC rate and NACK status (on/off).
int32_t VideoSender::RegisterProtectionCallback(
- VCMProtectionCallback* protection) {
+ VCMProtectionCallback* protection_callback) {
CriticalSectionScoped cs(_sendCritSect);
- _mediaOpt.RegisterProtectionCallback(protection);
+ protection_callback_ = protection_callback;
return VCM_OK;
}
@@ -314,33 +359,19 @@ int32_t VideoSender::AddVideoFrame(const I420VideoFrame& videoFrame,
if (_nextFrameTypes[0] == kFrameEmpty) {
return VCM_OK;
}
- _mediaOpt.UpdateIncomingFrameRate();
-
if (_mediaOpt.DropFrame()) {
- WEBRTC_TRACE(webrtc::kTraceStream,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Drop frame due to bitrate");
- } else {
- _mediaOpt.UpdateContentData(contentMetrics);
- int32_t ret =
- _encoder->Encode(videoFrame, codecSpecificInfo, _nextFrameTypes);
- if (_encoderInputFile != NULL) {
- if (PrintI420VideoFrame(videoFrame, _encoderInputFile) < 0) {
- return -1;
- }
- }
- if (ret < 0) {
- WEBRTC_TRACE(webrtc::kTraceError,
- webrtc::kTraceVideoCoding,
- VCMId(_id),
- "Encode error: %d",
- ret);
- return ret;
- }
- for (size_t i = 0; i < _nextFrameTypes.size(); ++i) {
- _nextFrameTypes[i] = kVideoFrameDelta; // Default frame type.
- }
+ return VCM_OK;
+ }
+ _mediaOpt.UpdateContentData(contentMetrics);
+ int32_t ret =
+ _encoder->Encode(videoFrame, codecSpecificInfo, _nextFrameTypes);
+ recorder_->Add(videoFrame);
+ if (ret < 0) {
+ LOG(LS_ERROR) << "Failed to encode frame. Error code: " << ret;
+ return ret;
+ }
+ for (size_t i = 0; i < _nextFrameTypes.size(); ++i) {
+ _nextFrameTypes[i] = kVideoFrameDelta; // Default frame type.
}
return VCM_OK;
}
@@ -369,11 +400,6 @@ int32_t VideoSender::EnableFrameDropper(bool enable) {
return VCM_OK;
}
-int32_t VideoSender::SentFrameCount(VCMFrameCount* frameCount) const {
- CriticalSectionScoped cs(_sendCritSect);
- return _mediaOpt.SentFrameCount(frameCount);
-}
-
int VideoSender::SetSenderNackMode(SenderNackMode mode) {
CriticalSectionScoped cs(_sendCritSect);
@@ -406,20 +432,11 @@ int VideoSender::SetSenderKeyFramePeriod(int periodMs) {
}
int VideoSender::StartDebugRecording(const char* file_name_utf8) {
- CriticalSectionScoped cs(_sendCritSect);
- _encoderInputFile = fopen(file_name_utf8, "wb");
- if (_encoderInputFile == NULL)
- return VCM_GENERAL_ERROR;
- return VCM_OK;
+ return recorder_->Start(file_name_utf8);
}
-int VideoSender::StopDebugRecording() {
- CriticalSectionScoped cs(_sendCritSect);
- if (_encoderInputFile != NULL) {
- fclose(_encoderInputFile);
- _encoderInputFile = NULL;
- }
- return VCM_OK;
+void VideoSender::StopDebugRecording() {
+ recorder_->Stop();
}
void VideoSender::SuspendBelowMinBitrate() {
@@ -443,14 +460,7 @@ void VideoSender::SuspendBelowMinBitrate() {
bool VideoSender::VideoSuspended() const {
CriticalSectionScoped cs(_sendCritSect);
- return _mediaOpt.video_suspended();
+ return _mediaOpt.IsVideoSuspended();
}
-
-void VideoSender::RegisterPostEncodeImageCallback(
- EncodedImageCallback* observer) {
- CriticalSectionScoped cs(_sendCritSect);
- _encodedFrameCallback.RegisterPostEncodeImageCallback(observer);
-}
-
} // namespace vcm
} // namespace webrtc
diff --git a/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender_unittest.cc b/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender_unittest.cc
index 513a99ee74b..67b3e7aeccb 100644
--- a/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender_unittest.cc
+++ b/chromium/third_party/webrtc/modules/video_coding/main/source/video_sender_unittest.cc
@@ -52,7 +52,7 @@ struct Vp8StreamInfo {
MATCHER_P(MatchesVp8StreamInfo, expected, "") {
bool res = true;
for (int tl = 0; tl < kMaxNumberOfTemporalLayers; ++tl) {
- if (abs(expected.framerate_fps[tl] - arg.framerate_fps[tl]) > 0.5) {
+ if (fabs(expected.framerate_fps[tl] - arg.framerate_fps[tl]) > 0.5) {
*result_listener << " framerate_fps[" << tl
<< "] = " << arg.framerate_fps[tl] << " (expected "
<< expected.framerate_fps[tl] << ") ";
@@ -173,7 +173,7 @@ class TestVideoSender : public ::testing::Test {
TestVideoSender() : clock_(1000), packetization_callback_(&clock_) {}
virtual void SetUp() {
- sender_.reset(new VideoSender(0, &clock_));
+ sender_.reset(new VideoSender(&clock_, &post_encode_callback_));
EXPECT_EQ(0, sender_->InitializeSender());
EXPECT_EQ(0, sender_->RegisterTransportCallback(&packetization_callback_));
}
@@ -185,6 +185,7 @@ class TestVideoSender : public ::testing::Test {
SimulatedClock clock_;
PacketizationCallback packetization_callback_;
+ MockEncodedImageCallback post_encode_callback_;
scoped_ptr<VideoSender> sender_;
scoped_ptr<FrameGenerator> generator_;
};
@@ -344,6 +345,8 @@ class TestVideoSenderWithVp8 : public TestVideoSender {
void InsertFrames(float framerate, float seconds) {
for (int i = 0; i < seconds * framerate; ++i) {
clock_.AdvanceTimeMilliseconds(1000.0f / framerate);
+ EXPECT_CALL(post_encode_callback_, Encoded(_, NULL, NULL))
+ .WillOnce(Return(0));
AddFrame();
// SetChannelParameters needs to be called frequently to propagate
diff --git a/chromium/third_party/webrtc/modules/video_coding/utility/OWNERS b/chromium/third_party/webrtc/modules/video_coding/utility/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_coding/utility/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/interface/video_processing.h b/chromium/third_party/webrtc/modules/video_processing/main/interface/video_processing.h
index b3e0483d06a..817d43d9bd6 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/interface/video_processing.h
+++ b/chromium/third_party/webrtc/modules/video_processing/main/interface/video_processing.h
@@ -236,14 +236,6 @@ class VideoProcessingModule : public Module {
uint32_t frame_rate) = 0;
/**
- Set max frame rate
- \param[in] max_frame_rate: maximum frame rate (limited to native frame rate)
-
- \return VPM_OK on success, a negative value on error (see error codes)
- */
- virtual int32_t SetMaxFramerate(uint32_t max_frame_rate) = 0;
-
- /**
Get decimated(target) frame rate
*/
virtual uint32_t Decimatedframe_rate() = 0;
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/OWNERS b/chromium/third_party/webrtc/modules/video_processing/main/source/OWNERS
new file mode 100644
index 00000000000..3ee6b4bf5f9
--- /dev/null
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/OWNERS
@@ -0,0 +1,5 @@
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/brighten.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/brighten.cc
index ffabbf7fff7..907a549064a 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/brighten.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/brighten.cc
@@ -12,22 +12,15 @@
#include <stdlib.h>
-#include "webrtc/system_wrappers/interface/trace.h"
-
namespace webrtc {
namespace VideoProcessing {
int32_t Brighten(I420VideoFrame* frame, int delta) {
assert(frame);
if (frame->IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
- "zero size frame");
return VPM_PARAMETER_ERROR;
}
-
if (frame->width() <= 0 || frame->height() <= 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
- "Invalid frame size");
return VPM_PARAMETER_ERROR;
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/brightness_detection.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/brightness_detection.cc
index 8817bac434b..f33117d1376 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/brightness_detection.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/brightness_detection.cc
@@ -10,7 +10,6 @@
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_processing/main/source/brightness_detection.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include <math.h>
@@ -37,16 +36,12 @@ int32_t VPMBrightnessDetection::ProcessFrame(
const I420VideoFrame& frame,
const VideoProcessingModule::FrameStats& stats) {
if (frame.IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Null frame pointer");
return VPM_PARAMETER_ERROR;
}
int width = frame.width();
int height = frame.height();
if (!VideoProcessingModule::ValidFrameStats(stats)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Invalid frame stats");
return VPM_PARAMETER_ERROR;
}
@@ -58,7 +53,7 @@ int32_t VPMBrightnessDetection::ProcessFrame(
for (uint32_t i = 0; i < low_th; i++) {
prop_low += stats.hist[i];
}
-prop_low /= stats.num_pixels;
+ prop_low /= stats.num_pixels;
// Get proportion in highest bins.
unsigned char high_th = 230;
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/color_enhancement.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/color_enhancement.cc
index eeec01659c0..aaa3a462256 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/color_enhancement.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/color_enhancement.cc
@@ -12,44 +12,38 @@
#include "webrtc/modules/video_processing/main/source/color_enhancement.h"
#include "webrtc/modules/video_processing/main/source/color_enhancement_private.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
namespace VideoProcessing {
int32_t ColorEnhancement(I420VideoFrame* frame) {
-assert(frame);
-// Pointers to U and V color pixels.
-uint8_t* ptr_u;
-uint8_t* ptr_v;
-uint8_t temp_chroma;
-if (frame->IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
- -1, "Null frame pointer");
- return VPM_GENERAL_ERROR;
-}
-
-if (frame->width() == 0 || frame->height() == 0) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
- -1, "Invalid frame size");
- return VPM_GENERAL_ERROR;
-}
-
-// Set pointers to first U and V pixels (skip luminance).
-ptr_u = frame->buffer(kUPlane);
-ptr_v = frame->buffer(kVPlane);
-int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
-
-// Loop through all chrominance pixels and modify color.
-for (int ix = 0; ix < size_uv; ix++) {
- temp_chroma = colorTable[*ptr_u][*ptr_v];
- *ptr_v = colorTable[*ptr_v][*ptr_u];
- *ptr_u = temp_chroma;
-
- ptr_u++;
- ptr_v++;
-}
-return VPM_OK;
+ assert(frame);
+ // Pointers to U and V color pixels.
+ uint8_t* ptr_u;
+ uint8_t* ptr_v;
+ uint8_t temp_chroma;
+ if (frame->IsZeroSize()) {
+ return VPM_GENERAL_ERROR;
+ }
+ if (frame->width() == 0 || frame->height() == 0) {
+ return VPM_GENERAL_ERROR;
+ }
+
+ // Set pointers to first U and V pixels (skip luminance).
+ ptr_u = frame->buffer(kUPlane);
+ ptr_v = frame->buffer(kVPlane);
+ int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
+
+ // Loop through all chrominance pixels and modify color.
+ for (int ix = 0; ix < size_uv; ix++) {
+ temp_chroma = colorTable[*ptr_u][*ptr_v];
+ *ptr_v = colorTable[*ptr_v][*ptr_u];
+ *ptr_u = temp_chroma;
+
+ ptr_u++;
+ ptr_v++;
+ }
+ return VPM_OK;
}
} // namespace VideoProcessing
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/deflickering.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/deflickering.cc
index 898fd80f473..cdc6174883f 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/deflickering.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/deflickering.cc
@@ -14,8 +14,8 @@
#include <stdlib.h>
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/sort.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
@@ -102,21 +102,16 @@ int32_t VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
int height = frame->height();
if (frame->IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Null frame pointer");
return VPM_GENERAL_ERROR;
}
// Stricter height check due to subsampling size calculation below.
if (height < 2) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Invalid frame size");
+ LOG(LS_ERROR) << "Invalid frame size.";
return VPM_GENERAL_ERROR;
}
if (!VideoProcessingModule::ValidFrameStats(*stats)) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Invalid frame stats");
return VPM_GENERAL_ERROR;
}
@@ -152,8 +147,7 @@ int32_t VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
// Ensure we won't get an overflow below.
// In practice, the number of subsampled pixels will not become this large.
if (y_sub_size > (1 << 21) - 1) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "Subsampled number of pixels too large");
+ LOG(LS_ERROR) << "Subsampled number of pixels too large.";
return -1;
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/denoising.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/denoising.cc
index 79c4bcc3d1b..4c8dcb439f6 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/denoising.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/denoising.cc
@@ -9,7 +9,6 @@
*/
#include "webrtc/modules/video_processing/main/source/denoising.h"
-#include "webrtc/system_wrappers/interface/trace.h"
#include <string.h>
@@ -78,8 +77,6 @@ int32_t VPMDenoising::ProcessFrame(I420VideoFrame* frame) {
int32_t num_pixels_changed = 0;
if (frame->IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, id_,
- "zero size frame");
return VPM_GENERAL_ERROR;
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.cc
index de4907029bc..e1cd04ff711 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.cc
@@ -9,14 +9,12 @@
*/
#include "webrtc/modules/video_processing/main/source/frame_preprocessor.h"
-#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
VPMFramePreprocessor::VPMFramePreprocessor()
: id_(0),
content_metrics_(NULL),
- max_frame_rate_(0),
resampled_frame_(),
enable_ca_(false),
frame_cnt_(0) {
@@ -60,14 +58,6 @@ void VPMFramePreprocessor::SetInputFrameResampleMode(
spatial_resampler_->SetInputFrameResampleMode(resampling_mode);
}
-int32_t VPMFramePreprocessor::SetMaxFramerate(uint32_t max_frame_rate) {
- if (max_frame_rate == 0) return VPM_PARAMETER_ERROR;
-
- // Max allowed frame_rate.
- max_frame_rate_ = max_frame_rate;
- return vd_->SetMaxFramerate(max_frame_rate);
-}
-
int32_t VPMFramePreprocessor::SetTargetResolution(
uint32_t width, uint32_t height, uint32_t frame_rate) {
if ( (width == 0) || (height == 0) || (frame_rate == 0)) {
@@ -78,7 +68,7 @@ int32_t VPMFramePreprocessor::SetTargetResolution(
if (ret_val < 0) return ret_val;
- ret_val = vd_->SetTargetframe_rate(frame_rate);
+ ret_val = vd_->SetTargetFramerate(frame_rate);
if (ret_val < 0) return ret_val;
return VPM_OK;
@@ -112,8 +102,6 @@ int32_t VPMFramePreprocessor::PreprocessFrame(const I420VideoFrame& frame,
vd_->UpdateIncomingframe_rate();
if (vd_->DropFrame()) {
- WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, id_,
- "Drop frame due to frame rate");
return 1; // drop 1 frame
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.h b/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.h
index ca62d38fc6d..64a5797b9bc 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.h
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/frame_preprocessor.h
@@ -39,9 +39,6 @@ class VPMFramePreprocessor {
// Enable content analysis.
void EnableContentAnalysis(bool enable);
- // Set max frame rate.
- int32_t SetMaxFramerate(uint32_t max_frame_rate);
-
// Set target resolution: frame rate and dimension.
int32_t SetTargetResolution(uint32_t width, uint32_t height,
uint32_t frame_rate);
@@ -68,7 +65,6 @@ class VPMFramePreprocessor {
int32_t id_;
VideoContentMetrics* content_metrics_;
- uint32_t max_frame_rate_;
I420VideoFrame resampled_frame_;
VPMSpatialResampler* spatial_resampler_;
VPMContentAnalysis* ca_;
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.cc
index 8fd3d036919..bf05bd71545 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.cc
@@ -16,15 +16,7 @@
namespace webrtc {
-VPMVideoDecimator::VPMVideoDecimator()
- : overshoot_modifier_(0),
- drop_count_(0),
- keep_count_(0),
- target_frame_rate_(30),
- incoming_frame_rate_(0.0f),
- max_frame_rate_(30),
- incoming_frame_times_(),
- enable_temporal_decimation_(true) {
+VPMVideoDecimator::VPMVideoDecimator() {
Reset();
}
@@ -36,7 +28,6 @@ void VPMVideoDecimator::Reset() {
keep_count_ = 0;
target_frame_rate_ = 30;
incoming_frame_rate_ = 0.0f;
- max_frame_rate_ = 30;
memset(incoming_frame_times_, 0, sizeof(incoming_frame_times_));
enable_temporal_decimation_ = true;
}
@@ -45,26 +36,10 @@ void VPMVideoDecimator::EnableTemporalDecimation(bool enable) {
enable_temporal_decimation_ = enable;
}
-int32_t VPMVideoDecimator::SetMaxFramerate(uint32_t max_frame_rate) {
- if (max_frame_rate == 0) return VPM_PARAMETER_ERROR;
-
- max_frame_rate_ = max_frame_rate;
-
- if (target_frame_rate_ > max_frame_rate_)
- target_frame_rate_ = max_frame_rate_;
-
- return VPM_OK;
-}
-
-int32_t VPMVideoDecimator::SetTargetframe_rate(uint32_t frame_rate) {
+int32_t VPMVideoDecimator::SetTargetFramerate(uint32_t frame_rate) {
if (frame_rate == 0) return VPM_PARAMETER_ERROR;
- if (frame_rate > max_frame_rate_) {
- // Override.
- target_frame_rate_ = max_frame_rate_;
- } else {
- target_frame_rate_ = frame_rate;
- }
+ target_frame_rate_ = frame_rate;
return VPM_OK;
}
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.h b/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.h
index d17da618802..fca74aeae15 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.h
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/video_decimator.h
@@ -25,8 +25,7 @@ class VPMVideoDecimator {
void EnableTemporalDecimation(bool enable);
- int32_t SetMaxFramerate(uint32_t max_frame_rate);
- int32_t SetTargetframe_rate(uint32_t frame_rate);
+ int32_t SetTargetFramerate(uint32_t frame_rate);
bool DropFrame();
@@ -50,7 +49,6 @@ class VPMVideoDecimator {
uint32_t keep_count_;
uint32_t target_frame_rate_;
float incoming_frame_rate_;
- uint32_t max_frame_rate_;
int64_t incoming_frame_times_[kFrameCountHistory_size];
bool enable_temporal_decimation_;
};
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.cc b/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.cc
index af1bfe1a412..3560030c86c 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.cc
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.cc
@@ -11,7 +11,7 @@
#include "webrtc/modules/video_processing/main/source/video_processing_impl.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
-#include "webrtc/system_wrappers/interface/trace.h"
+#include "webrtc/system_wrappers/interface/logging.h"
#include <assert.h>
@@ -68,13 +68,9 @@ VideoProcessingModuleImpl::VideoProcessingModuleImpl(const int32_t id)
deflickering_.ChangeUniqueId(id);
denoising_.ChangeUniqueId(id);
frame_pre_processor_.ChangeUniqueId(id);
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, id_,
- "Created");
}
VideoProcessingModuleImpl::~VideoProcessingModuleImpl() {
- WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, id_,
- "Destroyed");
delete &mutex_;
}
@@ -89,8 +85,7 @@ void VideoProcessingModuleImpl::Reset() {
int32_t VideoProcessingModule::GetFrameStats(FrameStats* stats,
const I420VideoFrame& frame) {
if (frame.IsZeroSize()) {
- WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
- "zero size frame");
+ LOG(LS_ERROR) << "Zero size frame.";
return VPM_PARAMETER_ERROR;
}
@@ -121,7 +116,10 @@ int32_t VideoProcessingModule::GetFrameStats(FrameStats* stats,
}
bool VideoProcessingModule::ValidFrameStats(const FrameStats& stats) {
- if (stats.num_pixels == 0) return false;
+ if (stats.num_pixels == 0) {
+ LOG(LS_WARNING) << "Invalid frame stats.";
+ return false;
+ }
return true;
}
@@ -173,11 +171,6 @@ void VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling
frame_pre_processor_.SetInputFrameResampleMode(resampling_mode);
}
-int32_t VideoProcessingModuleImpl::SetMaxFramerate(uint32_t max_frame_rate) {
- CriticalSectionScoped cs(&mutex_);
- return frame_pre_processor_.SetMaxFramerate(max_frame_rate);
-}
-
int32_t VideoProcessingModuleImpl::SetTargetResolution(uint32_t width,
uint32_t height,
uint32_t frame_rate) {
diff --git a/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.h b/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.h
index 913bb648364..deae6ff657e 100644
--- a/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.h
+++ b/chromium/third_party/webrtc/modules/video_processing/main/source/video_processing_impl.h
@@ -51,9 +51,6 @@ class VideoProcessingModuleImpl : public VideoProcessingModule {
// Enable content analysis
virtual void EnableContentAnalysis(bool enable);
- // Set max frame rate
- virtual int32_t SetMaxFramerate(uint32_t max_frame_rate);
-
// Set Target Resolution: frame rate and dimension
virtual int32_t SetTargetResolution(uint32_t width,
uint32_t height,
diff --git a/chromium/third_party/webrtc/modules/video_render/OWNERS b/chromium/third_party/webrtc/modules/video_render/OWNERS
index 5e8ed090910..7dd4a3624b0 100644
--- a/chromium/third_party/webrtc/modules/video_render/OWNERS
+++ b/chromium/third_party/webrtc/modules/video_render/OWNERS
@@ -3,3 +3,10 @@ mflodman@webrtc.org
perkj@webrtc.org
wu@webrtc.org
mallinath@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
diff --git a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
index 2057e282f48..c2afbbd6396 100644
--- a/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
+++ b/chromium/third_party/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
@@ -245,7 +245,6 @@ AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"AndroidNativeOpenGl2Channel dtor");
- delete &_renderCritSect;
if (_jvm) {
// get the JNI env for this thread
bool isAttached = false;
@@ -277,6 +276,8 @@ AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
}
}
}
+
+ delete &_renderCritSect;
}
int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm b/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm
index 0707a714b9f..9dfa69d1b79 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm
+++ b/chromium/third_party/webrtc/modules/video_render/ios/open_gles20.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
// This files is mostly copied from
// webrtc/modules/video_render/android/video_render_opengles20.h
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm
index 2a3ca180f72..02814b22234 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_channel.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
using namespace webrtc;
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
index 7bc46bfdad5..bbada099edf 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
@@ -246,7 +250,7 @@ int VideoRenderIosGles20::GetWindowRect(Rect& rect) {
int VideoRenderIosGles20::ChangeWindow(void* new_window) {
CriticalSectionScoped cs(gles_crit_sec_.get());
- view_ = (VideoRenderIosView*)new_window;
+ view_ = (__bridge VideoRenderIosView*)new_window;
return 0;
}
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h
index 138a524a0db..e38ed7ae4b1 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.h
@@ -99,7 +99,7 @@ class VideoRenderIosImpl : IVideoRender {
bool full_screen_;
CriticalSectionWrapper* crit_sec_;
- VideoRenderIosGles20* ptr_ios_render_;
+ webrtc::scoped_ptr<VideoRenderIosGles20> ptr_ios_render_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm
index 089b1e76988..9b8e1d62438 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_impl.mm
@@ -8,6 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
@@ -33,18 +37,13 @@ VideoRenderIosImpl::VideoRenderIosImpl(const int32_t id,
VideoRenderIosImpl::~VideoRenderIosImpl() {
delete crit_sec_;
-
- if (ptr_ios_render_) {
- delete ptr_ios_render_;
- ptr_ios_render_ = NULL;
- }
}
int32_t VideoRenderIosImpl::Init() {
CriticalSectionScoped cs(crit_sec_);
- ptr_ios_render_ = new VideoRenderIosGles20(
- (VideoRenderIosView*)ptr_window_, full_screen_, id_);
+ ptr_ios_render_.reset(new VideoRenderIosGles20(
+ (__bridge VideoRenderIosView*)ptr_window_, full_screen_, id_));
return ptr_ios_render_->Init();
;
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h
index db053560607..915c0f71e20 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.h
@@ -16,15 +16,7 @@
#include "webrtc/modules/video_render/ios/open_gles20.h"
-@interface VideoRenderIosView : UIView {
- @private // NOLINT
- EAGLContext* context_;
- webrtc::OpenGles20* gles_renderer20_;
- int _frameBufferWidth;
- int _frameBufferHeight;
- unsigned int _defaultFrameBuffer;
- unsigned int _colorRenderBuffer;
-}
+@interface VideoRenderIosView : UIView
- (BOOL)createContext;
- (BOOL)presentFramebuffer;
diff --git a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm
index 662c9676a61..2e00e097eec 100644
--- a/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm
+++ b/chromium/third_party/webrtc/modules/video_render/ios/video_render_ios_view.mm
@@ -8,12 +8,23 @@
* be found in the AUTHORS file in the root of the source tree.
*/
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
#include "webrtc/system_wrappers/interface/trace.h"
using namespace webrtc;
-@implementation VideoRenderIosView
+@implementation VideoRenderIosView {
+ EAGLContext* _context;
+ webrtc::scoped_ptr<webrtc::OpenGles20> _gles_renderer20;
+ int _frameBufferWidth;
+ int _frameBufferHeight;
+ unsigned int _defaultFrameBuffer;
+ unsigned int _colorRenderBuffer;
+}
@synthesize context = context_;
@@ -25,7 +36,7 @@ using namespace webrtc;
// init super class
self = [super initWithCoder:coder];
if (self) {
- gles_renderer20_ = new OpenGles20();
+ _gles_renderer20.reset(new OpenGles20());
}
return self;
}
@@ -34,7 +45,7 @@ using namespace webrtc;
// init super class
self = [super init];
if (self) {
- gles_renderer20_ = new OpenGles20();
+ _gles_renderer20.reset(new OpenGles20());
}
return self;
}
@@ -43,7 +54,7 @@ using namespace webrtc;
// init super class
self = [super initWithFrame:frame];
if (self) {
- gles_renderer20_ = new OpenGles20();
+ _gles_renderer20.reset(new OpenGles20());
}
return self;
}
@@ -59,13 +70,7 @@ using namespace webrtc;
_colorRenderBuffer = 0;
}
- context_ = nil;
-
- if (gles_renderer20_) {
- delete gles_renderer20_;
- }
-
- [super dealloc];
+ [EAGLContext setCurrentContext:nil];
}
- (NSString*)description {
@@ -84,14 +89,13 @@ using namespace webrtc;
kEAGLColorFormatRGBA8,
kEAGLDrawablePropertyColorFormat,
nil];
- context_ = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+ _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
- if (!context_) {
+ if (!_context) {
return NO;
}
- // set current EAGLContext to self context_
- if (![EAGLContext setCurrentContext:context_]) {
+ if (![EAGLContext setCurrentContext:_context]) {
return NO;
}
@@ -102,7 +106,7 @@ using namespace webrtc;
// Create color render buffer and allocate backing store.
glGenRenderbuffers(1, &_colorRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
- [context_ renderbufferStorage:GL_RENDERBUFFER
+ [_context renderbufferStorage:GL_RENDERBUFFER
fromDrawable:(CAEAGLLayer*)self.layer];
glGetRenderbufferParameteriv(
GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_frameBufferWidth);
@@ -121,12 +125,12 @@ using namespace webrtc;
glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
glViewport(0, 0, self.frame.size.width, self.frame.size.height);
- return gles_renderer20_->Setup([self bounds].size.width,
+ return _gles_renderer20->Setup([self bounds].size.width,
[self bounds].size.height);
}
- (BOOL)presentFramebuffer {
- if (![context_ presentRenderbuffer:GL_RENDERBUFFER]) {
+ if (![_context presentRenderbuffer:GL_RENDERBUFFER]) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
0,
@@ -135,21 +139,15 @@ using namespace webrtc;
__FUNCTION__,
__LINE__);
}
-
- // update UI stuff on the main thread
- [self performSelectorOnMainThread:@selector(setNeedsDisplay)
- withObject:nil
- waitUntilDone:NO];
-
return YES;
}
- (BOOL)renderFrame:(I420VideoFrame*)frameToRender {
- if (![EAGLContext setCurrentContext:context_]) {
+ if (![EAGLContext setCurrentContext:_context]) {
return NO;
}
- return gles_renderer20_->Render(*frameToRender);
+ return _gles_renderer20->Render(*frameToRender);
}
- (BOOL)setCoordinatesForZOrder:(const float)zOrder
@@ -157,7 +155,7 @@ using namespace webrtc;
Top:(const float)top
Right:(const float)right
Bottom:(const float)bottom {
- return gles_renderer20_->SetCoordinates(zOrder, left, top, right, bottom);
+ return _gles_renderer20->SetCoordinates(zOrder, left, top, right, bottom);
}
@end
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render.gypi b/chromium/third_party/webrtc/modules/video_render/video_render.gypi
index 4f3844406e0..71d969baba2 100644
--- a/chromium/third_party/webrtc/modules/video_render/video_render.gypi
+++ b/chromium/third_party/webrtc/modules/video_render/video_render.gypi
@@ -87,6 +87,12 @@
'android/video_render_android_surface_view.cc',
'android/video_render_opengles20.cc',
],
+ }, {
+ 'link_settings': {
+ 'libraries': [
+ '-lGLESv2',
+ ],
+ },
}],
['OS!="ios" or include_internal_video_render==0', {
'sources!': [
@@ -112,6 +118,12 @@
'linux/video_x11_channel.cc',
'linux/video_x11_render.cc',
],
+ }, {
+ 'link_settings': {
+ 'libraries': [
+ '-lXext',
+ ],
+ },
}],
['OS!="mac" or include_internal_video_render==0', {
'sources!': [
@@ -130,6 +142,9 @@
],
}],
['OS=="ios"', {
+ 'xcode_settings': {
+ 'CLANG_ENABLE_OBJC_ARC': 'YES',
+ },
'all_dependent_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
@@ -205,11 +220,11 @@
],
}],
['OS=="linux"', {
- 'libraries': [
- '-lrt',
- '-lXext',
- '-lX11',
- ],
+ 'link_settings': {
+ 'libraries': [
+ '-lX11',
+ ],
+ },
}],
['OS=="mac"', {
'xcode_settings': {
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_frames.cc b/chromium/third_party/webrtc/modules/video_render/video_render_frames.cc
index be5cac9aaa6..d790877e31f 100644
--- a/chromium/third_party/webrtc/modules/video_render/video_render_frames.cc
+++ b/chromium/third_party/webrtc/modules/video_render/video_render_frames.cc
@@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "webrtc/modules/video_render//video_render_frames.h"
+#include "webrtc/modules/video_render/video_render_frames.h"
#include <assert.h>
@@ -19,13 +19,12 @@
namespace webrtc {
-const int32_t KEventMaxWaitTimeMs = 200;
+const uint32_t KEventMaxWaitTimeMs = 200;
const uint32_t kMinRenderDelayMs = 10;
const uint32_t kMaxRenderDelayMs= 500;
VideoRenderFrames::VideoRenderFrames()
- : incoming_frames_(),
- render_delay_ms_(10) {
+ : render_delay_ms_(10) {
}
VideoRenderFrames::~VideoRenderFrames() {
@@ -35,12 +34,19 @@ VideoRenderFrames::~VideoRenderFrames() {
int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) {
const int64_t time_now = TickTime::MillisecondTimestamp();
- if (new_frame->render_time_ms() + KOldRenderTimestampMS < time_now) {
- WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+ // Drop old frames only when there are other frames in the queue, otherwise, a
+ // really slow system never renders any frames.
+ if (!incoming_frames_.empty() &&
+ new_frame->render_time_ms() + KOldRenderTimestampMS < time_now) {
+ WEBRTC_TRACE(kTraceWarning,
+ kTraceVideoRenderer,
+ -1,
"%s: too old frame, timestamp=%u.",
- __FUNCTION__, new_frame->timestamp());
+ __FUNCTION__,
+ new_frame->timestamp());
return -1;
}
+
if (new_frame->render_time_ms() > time_now + KFutureRenderTimestampMS) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: frame too long into the future, timestamp=%u.",
@@ -49,26 +55,18 @@ int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) {
}
if (new_frame->native_handle() != NULL) {
- incoming_frames_.PushBack(new TextureVideoFrame(
- static_cast<NativeHandle*>(new_frame->native_handle()),
- new_frame->width(),
- new_frame->height(),
- new_frame->timestamp(),
- new_frame->render_time_ms()));
- return incoming_frames_.GetSize();
+ incoming_frames_.push_back(new_frame->CloneFrame());
+ return static_cast<int32_t>(incoming_frames_.size());
}
// Get an empty frame
I420VideoFrame* frame_to_add = NULL;
- if (!empty_frames_.Empty()) {
- ListItem* item = empty_frames_.First();
- if (item) {
- frame_to_add = static_cast<I420VideoFrame*>(item->GetItem());
- empty_frames_.Erase(item);
- }
+ if (!empty_frames_.empty()) {
+ frame_to_add = empty_frames_.front();
+ empty_frames_.pop_front();
}
if (!frame_to_add) {
- if (empty_frames_.GetSize() + incoming_frames_.GetSize() >
+ if (empty_frames_.size() + incoming_frames_.size() >
KMaxNumberOfFrames) {
// Already allocated too many frames.
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer,
@@ -80,7 +78,7 @@ int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) {
// Allocate new memory.
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
"%s: allocating buffer %d", __FUNCTION__,
- empty_frames_.GetSize() + incoming_frames_.GetSize());
+ empty_frames_.size() + incoming_frames_.size());
frame_to_add = new I420VideoFrame();
if (!frame_to_add) {
@@ -97,33 +95,28 @@ int32_t VideoRenderFrames::AddFrame(I420VideoFrame* new_frame) {
// TODO(mflodman) Change this!
// Remove const ness. Copying will be costly.
frame_to_add->SwapFrame(new_frame);
- incoming_frames_.PushBack(frame_to_add);
+ incoming_frames_.push_back(frame_to_add);
- return incoming_frames_.GetSize();
+ return static_cast<int32_t>(incoming_frames_.size());
}
I420VideoFrame* VideoRenderFrames::FrameToRender() {
I420VideoFrame* render_frame = NULL;
- while (!incoming_frames_.Empty()) {
- ListItem* item = incoming_frames_.First();
- if (item) {
- I420VideoFrame* oldest_frame_in_list =
- static_cast<I420VideoFrame*>(item->GetItem());
- if (oldest_frame_in_list->render_time_ms() <=
- TickTime::MillisecondTimestamp() + render_delay_ms_) {
- // This is the oldest one so far and it's OK to render.
- if (render_frame) {
- // This one is older than the newly found frame, remove this one.
- ReturnFrame(render_frame);
- }
- render_frame = oldest_frame_in_list;
- incoming_frames_.Erase(item);
- } else {
- // We can't release this one yet, we're done here.
- break;
+ FrameList::iterator iter = incoming_frames_.begin();
+ while(iter != incoming_frames_.end()) {
+ I420VideoFrame* oldest_frame_in_list = *iter;
+ if (oldest_frame_in_list->render_time_ms() <=
+ TickTime::MillisecondTimestamp() + render_delay_ms_) {
+ // This is the oldest one so far and it's OK to render.
+ if (render_frame) {
+ // This one is older than the newly found frame, remove this one.
+ ReturnFrame(render_frame);
}
+ render_frame = oldest_frame_in_list;
+ iter = incoming_frames_.erase(iter);
} else {
- assert(false);
+ // We can't release this one yet, we're done here.
+ break;
}
}
return render_frame;
@@ -135,7 +128,7 @@ int32_t VideoRenderFrames::ReturnFrame(I420VideoFrame* old_frame) {
old_frame->ResetSize();
old_frame->set_timestamp(0);
old_frame->set_render_time_ms(0);
- empty_frames_.PushBack(old_frame);
+ empty_frames_.push_back(old_frame);
} else {
delete old_frame;
}
@@ -143,40 +136,29 @@ int32_t VideoRenderFrames::ReturnFrame(I420VideoFrame* old_frame) {
}
int32_t VideoRenderFrames::ReleaseAllFrames() {
- while (!incoming_frames_.Empty()) {
- ListItem* item = incoming_frames_.First();
- if (item) {
- I420VideoFrame* frame = static_cast<I420VideoFrame*>(item->GetItem());
- assert(frame != NULL);
- delete frame;
- }
- incoming_frames_.Erase(item);
+ for (FrameList::iterator iter = incoming_frames_.begin();
+ iter != incoming_frames_.end(); ++iter) {
+ delete *iter;
}
- while (!empty_frames_.Empty()) {
- ListItem* item = empty_frames_.First();
- if (item) {
- I420VideoFrame* frame = static_cast<I420VideoFrame*>(item->GetItem());
- assert(frame != NULL);
- delete frame;
- }
- empty_frames_.Erase(item);
+ incoming_frames_.clear();
+
+ for (FrameList::iterator iter = empty_frames_.begin();
+ iter != empty_frames_.end(); ++iter) {
+ delete *iter;
}
+ empty_frames_.clear();
return 0;
}
uint32_t VideoRenderFrames::TimeToNextFrameRelease() {
- int64_t time_to_release = 0;
- ListItem* item = incoming_frames_.First();
- if (item) {
- I420VideoFrame* oldest_frame =
- static_cast<I420VideoFrame*>(item->GetItem());
- time_to_release = oldest_frame->render_time_ms() - render_delay_ms_
- - TickTime::MillisecondTimestamp();
- if (time_to_release < 0) {
- time_to_release = 0;
- }
- } else {
- time_to_release = KEventMaxWaitTimeMs;
+ if (incoming_frames_.empty()) {
+ return KEventMaxWaitTimeMs;
+ }
+ I420VideoFrame* oldest_frame = incoming_frames_.front();
+ int64_t time_to_release = oldest_frame->render_time_ms() - render_delay_ms_
+ - TickTime::MillisecondTimestamp();
+ if (time_to_release < 0) {
+ time_to_release = 0;
}
return static_cast<uint32_t>(time_to_release);
}
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_frames.h b/chromium/third_party/webrtc/modules/video_render/video_render_frames.h
index 1a5d64d8072..d2e887f8272 100644
--- a/chromium/third_party/webrtc/modules/video_render/video_render_frames.h
+++ b/chromium/third_party/webrtc/modules/video_render/video_render_frames.h
@@ -11,8 +11,9 @@
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_ // NOLINT
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_ // NOLINT
+#include <list>
+
#include "webrtc/modules/video_render/include/video_render.h"
-#include "webrtc/system_wrappers/interface/list_wrapper.h"
namespace webrtc {
@@ -41,6 +42,8 @@ class VideoRenderFrames {
int32_t SetRenderDelay(const uint32_t render_delay);
private:
+ typedef std::list<I420VideoFrame*> FrameList;
+
// 10 seconds for 30 fps.
enum { KMaxNumberOfFrames = 300 };
// Don't render frames with timestamp older than 500ms from now.
@@ -49,9 +52,9 @@ class VideoRenderFrames {
enum { KFutureRenderTimestampMS = 10000 };
// Sorted list with framed to be rendered, oldest first.
- ListWrapper incoming_frames_;
+ FrameList incoming_frames_;
// Empty frames.
- ListWrapper empty_frames_;
+ FrameList empty_frames_;
// Estimated delay from a frame is released until it's rendered.
uint32_t render_delay_ms_;
diff --git a/chromium/third_party/webrtc/modules/video_render/video_render_tests.isolate b/chromium/third_party/webrtc/modules/video_render/video_render_tests.isolate
index 397ec04e8c7..15c80141dcb 100644
--- a/chromium/third_party/webrtc/modules/video_render/video_render_tests.isolate
+++ b/chromium/third_party/webrtc/modules/video_render/video_render_tests.isolate
@@ -8,27 +8,25 @@
{
'conditions': [
['OS=="android"', {
- # When doing Android builds, the WebRTC code is put in third_party/webrtc
- # of a Chromium checkout, this is one level above the standalone build.
'variables': {
'isolate_dependency_untracked': [
- '../../../../data/',
- '../../../../resources/',
+ '<(DEPTH)/data/',
+ '<(DEPTH)/resources/',
],
},
}],
['OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'command': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/video_render_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_tracked': [
- '../../../testing/test_env.py',
+ '<(DEPTH)/testing/test_env.py',
'<(PRODUCT_DIR)/video_render_tests<(EXECUTABLE_SUFFIX)',
],
'isolate_dependency_untracked': [
- '../../../tools/swarming_client/',
+ '<(DEPTH)/tools/swarming_client/',
],
},
}],